[fix] performance & misc bugs

This commit is contained in:
Evan Reichard 2023-10-03 16:47:38 -04:00
parent 5cd4e165b0
commit 317a1e3145
11 changed files with 386 additions and 354 deletions

View File

@ -8,7 +8,6 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
"time"
argon2 "github.com/alexedwards/argon2id" argon2 "github.com/alexedwards/argon2id"
"github.com/gabriel-vasile/mimetype" "github.com/gabriel-vasile/mimetype"
@ -130,7 +129,7 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
"WordsPerMinute": "N/A", "WordsPerMinute": "N/A",
} }
if document.Words != nil && *document.Words != 0 { if document.Words != nil && *document.Words != 0 && document.TotalTimeSeconds != 0 {
statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0) statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
} }
@ -158,37 +157,13 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
templateVars["Data"] = activity templateVars["Data"] = activity
} else if routeName == "home" { } else if routeName == "home" {
start_time := time.Now()
weekly_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
UserID: userID,
Window: "WEEK",
})
if err != nil {
log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
}
log.Debug("GetUserWindowStreaks - WEEK - ", time.Since(start_time))
start_time = time.Now()
daily_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{ streaks, _ := api.DB.Queries.GetUserStreaks(api.DB.Ctx, userID)
UserID: userID,
Window: "DAY",
})
if err != nil {
log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
}
log.Debug("GetUserWindowStreaks - DAY - ", time.Since(start_time))
start_time = time.Now()
database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID) database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID)
log.Debug("GetDatabaseInfo - ", time.Since(start_time))
start_time = time.Now()
read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID) read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID)
log.Debug("GetDailyReadStats - ", time.Since(start_time))
templateVars["Data"] = gin.H{ templateVars["Data"] = gin.H{
"DailyStreak": daily_streak, "Streaks": streaks,
"WeeklyStreak": weekly_streak,
"DatabaseInfo": database_info, "DatabaseInfo": database_info,
"GraphData": read_graph_data, "GraphData": read_graph_data,
} }

View File

@ -277,6 +277,11 @@ func (api *API) addActivities(c *gin.Context) {
return return
} }
// Update Temp Tables
if err := api.DB.CacheTempTables(); err != nil {
log.Warn("[addActivities] CacheTempTables Failure: ", err)
}
c.JSON(http.StatusOK, gin.H{ c.JSON(http.StatusOK, gin.H{
"added": len(rActivity.Activity), "added": len(rActivity.Activity),
}) })

View File

@ -75,7 +75,7 @@ local STATISTICS_ACTIVITY_SINCE_QUERY = [[
JOIN book AS b JOIN book AS b
ON b.id = psd.id_book ON b.id = psd.id_book
WHERE start_time > %d WHERE start_time > %d
ORDER BY start_time ASC LIMIT 1000; ORDER BY start_time ASC LIMIT 5000;
]] ]]
local STATISTICS_BOOK_QUERY = [[ local STATISTICS_BOOK_QUERY = [[
@ -907,7 +907,7 @@ function SyncNinja:getStatisticsActivity(timestamp)
local conn = SQ3.open(statistics_db) local conn = SQ3.open(statistics_db)
local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY, local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY,
timestamp)) timestamp))
local rows = stmt:resultset("i", 1000) local rows = stmt:resultset("i", 5000)
conn:close() conn:close()
-- No Results -- No Results

View File

@ -21,6 +21,9 @@ type DBManager struct {
//go:embed schema.sql //go:embed schema.sql
var ddl string var ddl string
//go:embed update_temp_tables.sql
var tsql string
func NewMgr(c *config.Config) *DBManager { func NewMgr(c *config.Config) *DBManager {
// Create Manager // Create Manager
dbm := &DBManager{ dbm := &DBManager{
@ -44,22 +47,23 @@ func NewMgr(c *config.Config) *DBManager {
log.Fatal("Unsupported Database") log.Fatal("Unsupported Database")
} }
// Create Tables
if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil {
log.Fatal(err)
}
dbm.Queries = New(dbm.DB) dbm.Queries = New(dbm.DB)
return dbm return dbm
} }
func connectHookSQLite(conn *sqlite.SQLiteConn) error { func (dbm *DBManager) CacheTempTables() error {
if err := conn.RegisterFunc("test_func", func() string { if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil {
return "FOOBAR"
}, false); err != nil {
log.Info("Error Registering Function")
return err return err
} }
return nil return nil
} }
func connectHookSQLite(conn *sqlite.SQLiteConn) error {
// Create Tables
log.Debug("Creating Schema")
if _, err := conn.Exec(ddl, nil); err != nil {
log.Warn("Create Schema Failure: ", err)
}
return nil
}

View File

@ -9,15 +9,14 @@ import (
) )
type Activity struct { type Activity struct {
ID int64 `json:"id"`
UserID string `json:"user_id"` UserID string `json:"user_id"`
DocumentID string `json:"document_id"` DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"` DeviceID string `json:"device_id"`
CreatedAt time.Time `json:"created_at"`
StartTime time.Time `json:"start_time"` StartTime time.Time `json:"start_time"`
Duration int64 `json:"duration"`
Page int64 `json:"page"` Page int64 `json:"page"`
Pages int64 `json:"pages"` Pages int64 `json:"pages"`
CreatedAt time.Time `json:"created_at"` Duration int64 `json:"duration"`
} }
type Device struct { type Device struct {
@ -80,14 +79,16 @@ type Metadatum struct {
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
} }
type RescaledActivity struct { type RawActivity struct {
ID int64 `json:"id"`
UserID string `json:"user_id"`
DocumentID string `json:"document_id"` DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"` DeviceID string `json:"device_id"`
UserID string `json:"user_id"`
StartTime time.Time `json:"start_time"` StartTime time.Time `json:"start_time"`
Pages int64 `json:"pages"`
Page int64 `json:"page"` Page int64 `json:"page"`
Pages int64 `json:"pages"`
Duration int64 `json:"duration"` Duration int64 `json:"duration"`
CreatedAt time.Time `json:"created_at"`
} }
type User struct { type User struct {
@ -97,3 +98,36 @@ type User struct {
TimeOffset *string `json:"time_offset"` TimeOffset *string `json:"time_offset"`
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
} }
type UserStreak struct {
UserID string `json:"user_id"`
Window string `json:"window"`
MaxStreak int64 `json:"max_streak"`
MaxStreakStartDate string `json:"max_streak_start_date"`
MaxStreakEndDate string `json:"max_streak_end_date"`
CurrentStreak int64 `json:"current_streak"`
CurrentStreakStartDate string `json:"current_streak_start_date"`
CurrentStreakEndDate string `json:"current_streak_end_date"`
}
type ViewRescaledActivity struct {
UserID string `json:"user_id"`
DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"`
CreatedAt time.Time `json:"created_at"`
StartTime time.Time `json:"start_time"`
Page int64 `json:"page"`
Pages int64 `json:"pages"`
Duration int64 `json:"duration"`
}
type ViewUserStreak struct {
UserID string `json:"user_id"`
Window string `json:"window"`
MaxStreak interface{} `json:"max_streak"`
MaxStreakStartDate interface{} `json:"max_streak_start_date"`
MaxStreakEndDate interface{} `json:"max_streak_end_date"`
CurrentStreak interface{} `json:"current_streak"`
CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
CurrentStreakEndDate interface{} `json:"current_streak_end_date"`
}

View File

@ -135,7 +135,7 @@ AND user_id = $user_id
ORDER BY start_time DESC LIMIT 1; ORDER BY start_time DESC LIMIT 1;
-- name: AddActivity :one -- name: AddActivity :one
INSERT INTO activity ( INSERT INTO raw_activity (
user_id, user_id,
document_id, document_id,
device_id, device_id,
@ -195,12 +195,12 @@ WITH true_progress AS (
page, page,
pages, pages,
-- Determine Read Pages -- Determine Read Pages
COUNT(DISTINCT page) AS read_pages, COUNT(DISTINCT page) AS read_pages,
-- Derive Percentage of Book -- Derive Percentage of Book
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
FROM rescaled_activity FROM activity
WHERE user_id = $user_id WHERE user_id = $user_id
AND document_id = $document_id AND document_id = $document_id
GROUP BY document_id GROUP BY document_id
@ -220,15 +220,15 @@ SELECT
-- 1. Calculate Total Time in Seconds (Sum Duration in Activity) -- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
-- 2. Divide by Read Pages (Distinct Pages in Activity) -- 2. Divide by Read Pages (Distinct Pages in Activity)
CAST(CASE CAST(CASE
WHEN total_time_seconds IS NULL THEN 0.0 WHEN total_time_seconds IS NULL THEN 0.0
ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
END AS INTEGER) AS seconds_per_page, END AS INTEGER) AS seconds_per_page,
-- Arbitrarily >97% is Complete -- Arbitrarily >97% is Complete
CAST(CASE CAST(CASE
WHEN percentage > 97.0 THEN 100.0 WHEN percentage > 97.0 THEN 100.0
WHEN percentage IS NULL THEN 0.0 WHEN percentage IS NULL THEN 0.0
ELSE percentage ELSE percentage
END AS REAL) AS percentage END AS REAL) AS percentage
FROM documents FROM documents
@ -291,7 +291,7 @@ OFFSET $offset;
-- name: GetActivity :many -- name: GetActivity :many
SELECT SELECT
document_id, document_id,
CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time, CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
title, title,
author, author,
duration, duration,
@ -303,11 +303,12 @@ LEFT JOIN users ON users.id = activity.user_id
WHERE WHERE
activity.user_id = $user_id activity.user_id = $user_id
AND ( AND (
CAST($doc_filter AS BOOLEAN) = TRUE (
AND document_id = $document_id CAST($doc_filter AS BOOLEAN) = TRUE
AND document_id = $document_id
) OR $doc_filter = FALSE
) )
OR $doc_filter = FALSE ORDER BY activity.start_time DESC
ORDER BY start_time DESC
LIMIT $limit LIMIT $limit
OFFSET $offset; OFFSET $offset;
@ -326,7 +327,7 @@ GROUP BY activity.device_id;
SELECT SELECT
COUNT(DISTINCT page) AS pages_read, COUNT(DISTINCT page) AS pages_read,
SUM(duration) AS total_time SUM(duration) AS total_time
FROM rescaled_activity FROM activity
WHERE document_id = $document_id WHERE document_id = $document_id
AND user_id = $user_id AND user_id = $user_id
AND start_time >= $start_time; AND start_time >= $start_time;
@ -334,7 +335,7 @@ AND start_time >= $start_time;
-- name: GetDocumentReadStatsCapped :one -- name: GetDocumentReadStatsCapped :one
WITH capped_stats AS ( WITH capped_stats AS (
SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations
FROM rescaled_activity FROM activity
WHERE document_id = $document_id WHERE document_id = $document_id
AND user_id = $user_id AND user_id = $user_id
AND start_time >= $start_time AND start_time >= $start_time
@ -357,77 +358,9 @@ WITH document_days AS (
SELECT CAST(COUNT(*) AS INTEGER) AS days_read SELECT CAST(COUNT(*) AS INTEGER) AS days_read
FROM document_days; FROM document_days;
-- name: GetUserWindowStreaks :one -- name: GetUserStreaks :many
WITH document_windows AS ( SELECT * FROM user_streaks
SELECT WHERE user_id = $user_id;
CASE
WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
END AS read_window,
time_offset
FROM activity
JOIN users ON users.id = activity.user_id
WHERE user_id = $user_id
AND CAST($window AS TEXT) = CAST($window AS TEXT)
GROUP BY read_window
),
partitions AS (
SELECT
document_windows.*,
row_number() OVER (
PARTITION BY 1 ORDER BY read_window DESC
) AS seqnum
FROM document_windows
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
time_offset
FROM partitions
GROUP BY
CASE
WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END,
time_offset
ORDER BY end_date DESC
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
LIMIT 1
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date
FROM streaks
WHERE CASE
WHEN ?2 = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN ?2 = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
LIMIT 1
)
SELECT
CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON 1 = 1
LIMIT 1;
-- name: GetDatabaseInfo :one -- name: GetDatabaseInfo :one
SELECT SELECT

View File

@ -13,7 +13,7 @@ import (
) )
const addActivity = `-- name: AddActivity :one const addActivity = `-- name: AddActivity :one
INSERT INTO activity ( INSERT INTO raw_activity (
user_id, user_id,
document_id, document_id,
device_id, device_id,
@ -23,7 +23,7 @@ INSERT INTO activity (
pages pages
) )
VALUES (?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING id, user_id, document_id, device_id, start_time, duration, page, pages, created_at RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at
` `
type AddActivityParams struct { type AddActivityParams struct {
@ -36,7 +36,7 @@ type AddActivityParams struct {
Pages int64 `json:"pages"` Pages int64 `json:"pages"`
} }
func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) { func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) {
row := q.db.QueryRowContext(ctx, addActivity, row := q.db.QueryRowContext(ctx, addActivity,
arg.UserID, arg.UserID,
arg.DocumentID, arg.DocumentID,
@ -46,16 +46,16 @@ func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activ
arg.Page, arg.Page,
arg.Pages, arg.Pages,
) )
var i Activity var i RawActivity
err := row.Scan( err := row.Scan(
&i.ID, &i.ID,
&i.UserID, &i.UserID,
&i.DocumentID, &i.DocumentID,
&i.DeviceID, &i.DeviceID,
&i.StartTime, &i.StartTime,
&i.Duration,
&i.Page, &i.Page,
&i.Pages, &i.Pages,
&i.Duration,
&i.CreatedAt, &i.CreatedAt,
) )
return i, err return i, err
@ -151,7 +151,7 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error)
const getActivity = `-- name: GetActivity :many const getActivity = `-- name: GetActivity :many
SELECT SELECT
document_id, document_id,
CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time, CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
title, title,
author, author,
duration, duration,
@ -163,11 +163,12 @@ LEFT JOIN users ON users.id = activity.user_id
WHERE WHERE
activity.user_id = ?1 activity.user_id = ?1
AND ( AND (
CAST(?2 AS BOOLEAN) = TRUE (
AND document_id = ?3 CAST(?2 AS BOOLEAN) = TRUE
AND document_id = ?3
) OR ?2 = FALSE
) )
OR ?2 = FALSE ORDER BY activity.start_time DESC
ORDER BY start_time DESC
LIMIT ?5 LIMIT ?5
OFFSET ?4 OFFSET ?4
` `
@ -477,7 +478,7 @@ const getDocumentReadStats = `-- name: GetDocumentReadStats :one
SELECT SELECT
COUNT(DISTINCT page) AS pages_read, COUNT(DISTINCT page) AS pages_read,
SUM(duration) AS total_time SUM(duration) AS total_time
FROM rescaled_activity FROM activity
WHERE document_id = ?1 WHERE document_id = ?1
AND user_id = ?2 AND user_id = ?2
AND start_time >= ?3 AND start_time >= ?3
@ -504,7 +505,7 @@ func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadS
const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one
WITH capped_stats AS ( WITH capped_stats AS (
SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations
FROM rescaled_activity FROM activity
WHERE document_id = ?2 WHERE document_id = ?2
AND user_id = ?3 AND user_id = ?3
AND start_time >= ?4 AND start_time >= ?4
@ -549,12 +550,12 @@ WITH true_progress AS (
page, page,
pages, pages,
-- Determine Read Pages -- Determine Read Pages
COUNT(DISTINCT page) AS read_pages, COUNT(DISTINCT page) AS read_pages,
-- Derive Percentage of Book -- Derive Percentage of Book
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
FROM rescaled_activity FROM activity
WHERE user_id = ?1 WHERE user_id = ?1
AND document_id = ?2 AND document_id = ?2
GROUP BY document_id GROUP BY document_id
@ -574,15 +575,15 @@ SELECT
-- 1. Calculate Total Time in Seconds (Sum Duration in Activity) -- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
-- 2. Divide by Read Pages (Distinct Pages in Activity) -- 2. Divide by Read Pages (Distinct Pages in Activity)
CAST(CASE CAST(CASE
WHEN total_time_seconds IS NULL THEN 0.0 WHEN total_time_seconds IS NULL THEN 0.0
ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
END AS INTEGER) AS seconds_per_page, END AS INTEGER) AS seconds_per_page,
-- Arbitrarily >97% is Complete -- Arbitrarily >97% is Complete
CAST(CASE CAST(CASE
WHEN percentage > 97.0 THEN 100.0 WHEN percentage > 97.0 THEN 100.0
WHEN percentage IS NULL THEN 0.0 WHEN percentage IS NULL THEN 0.0
ELSE percentage ELSE percentage
END AS REAL) AS percentage END AS REAL) AS percentage
FROM documents FROM documents
@ -978,105 +979,41 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
return i, err return i, err
} }
const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one const getUserStreaks = `-- name: GetUserStreaks :many
WITH document_windows AS ( SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
SELECT WHERE user_id = ?1
CASE
WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
END AS read_window,
time_offset
FROM activity
JOIN users ON users.id = activity.user_id
WHERE user_id = ?1
AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT)
GROUP BY read_window
),
partitions AS (
SELECT
document_windows.read_window, document_windows.time_offset,
row_number() OVER (
PARTITION BY 1 ORDER BY read_window DESC
) AS seqnum
FROM document_windows
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
time_offset
FROM partitions
GROUP BY
CASE
WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END,
time_offset
ORDER BY end_date DESC
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
LIMIT 1
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date
FROM streaks
WHERE CASE
WHEN ?2 = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN ?2 = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
LIMIT 1
)
SELECT
CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON 1 = 1
LIMIT 1
` `
type GetUserWindowStreaksParams struct { func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) {
UserID string `json:"user_id"` rows, err := q.db.QueryContext(ctx, getUserStreaks, userID)
Window string `json:"window"` if err != nil {
} return nil, err
}
type GetUserWindowStreaksRow struct { defer rows.Close()
MaxStreak int64 `json:"max_streak"` var items []UserStreak
MaxStreakStartDate string `json:"max_streak_start_date"` for rows.Next() {
MaxStreakEndDate string `json:"max_streak_end_date"` var i UserStreak
CurrentStreak interface{} `json:"current_streak"` if err := rows.Scan(
CurrentStreakStartDate string `json:"current_streak_start_date"` &i.UserID,
CurrentStreakEndDate string `json:"current_streak_end_date"` &i.Window,
} &i.MaxStreak,
&i.MaxStreakStartDate,
func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) { &i.MaxStreakEndDate,
row := q.db.QueryRowContext(ctx, getUserWindowStreaks, arg.UserID, arg.Window) &i.CurrentStreak,
var i GetUserWindowStreaksRow &i.CurrentStreakStartDate,
err := row.Scan( &i.CurrentStreakEndDate,
&i.MaxStreak, ); err != nil {
&i.MaxStreakStartDate, return nil, err
&i.MaxStreakEndDate, }
&i.CurrentStreak, items = append(items, i)
&i.CurrentStreakStartDate, }
&i.CurrentStreakEndDate, if err := rows.Close(); err != nil {
) return nil, err
return i, err }
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
} }
const getUsers = `-- name: GetUsers :many const getUsers = `-- name: GetUsers :many

View File

@ -1,6 +1,10 @@
PRAGMA foreign_keys = ON; PRAGMA foreign_keys = ON;
PRAGMA journal_mode = WAL; PRAGMA journal_mode = WAL;
---------------------------------------------------------------
------------------------ Normal Tables ------------------------
---------------------------------------------------------------
-- Authentication -- Authentication
CREATE TABLE IF NOT EXISTS users ( CREATE TABLE IF NOT EXISTS users (
id TEXT NOT NULL PRIMARY KEY, id TEXT NOT NULL PRIMARY KEY,
@ -101,17 +105,17 @@ CREATE TABLE IF NOT EXISTS document_progress (
PRIMARY KEY (user_id, document_id, device_id) PRIMARY KEY (user_id, document_id, device_id)
); );
-- Read Activity -- Raw Read Activity
CREATE TABLE IF NOT EXISTS activity ( CREATE TABLE IF NOT EXISTS raw_activity (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
document_id TEXT NOT NULL, document_id TEXT NOT NULL,
device_id TEXT NOT NULL, device_id TEXT NOT NULL,
start_time DATETIME NOT NULL, start_time DATETIME NOT NULL,
duration INTEGER NOT NULL,
page INTEGER NOT NULL, page INTEGER NOT NULL,
pages INTEGER NOT NULL, pages INTEGER NOT NULL,
duration INTEGER NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id), FOREIGN KEY (user_id) REFERENCES users (id),
@ -119,23 +123,57 @@ CREATE TABLE IF NOT EXISTS activity (
FOREIGN KEY (device_id) REFERENCES devices (id) FOREIGN KEY (device_id) REFERENCES devices (id)
); );
-- Indexes ---------------------------------------------------------------
CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time); ----------------------- Temporary Tables ----------------------
CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity ( ---------------------------------------------------------------
-- Temporary Activity Table (Cached from View)
CREATE TEMPORARY TABLE IF NOT EXISTS activity (
user_id TEXT NOT NULL,
document_id TEXT NOT NULL,
device_id TEXT NOT NULL,
created_at DATETIME NOT NULL,
start_time DATETIME NOT NULL,
page INTEGER NOT NULL,
pages INTEGER NOT NULL,
duration INTEGER NOT NULL
);
-- Temporary User Streaks Table (Cached from View)
CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
user_id TEXT NOT NULL,
window TEXT NOT NULL,
max_streak INTEGER NOT NULL,
max_streak_start_date TEXT NOT NULL,
max_streak_end_date TEXT NOT NULL,
current_streak INTEGER NOT NULL,
current_streak_start_date TEXT NOT NULL,
current_streak_end_date TEXT NOT NULL
);
---------------------------------------------------------------
--------------------------- Indexes ---------------------------
---------------------------------------------------------------
CREATE INDEX IF NOT EXISTS temp.activity_start_time ON activity (start_time);
CREATE INDEX IF NOT EXISTS temp.activity_user_id ON activity (user_id);
CREATE INDEX IF NOT EXISTS temp.activity_user_id_document_id ON activity (
user_id, user_id,
document_id document_id
); );
-- Update Trigger ---------------------------------------------------------------
CREATE TRIGGER IF NOT EXISTS update_documents_updated_at ---------------------------- Views ----------------------------
BEFORE UPDATE ON documents BEGIN ---------------------------------------------------------------
UPDATE documents
SET updated_at = CURRENT_TIMESTAMP
WHERE id = old.id;
END;
-- Rescaled Activity View (Adapted from KOReader) --------------------------------
CREATE VIEW IF NOT EXISTS rescaled_activity AS ------- Rescaled Activity ------
--------------------------------
CREATE VIEW IF NOT EXISTS view_rescaled_activity AS
WITH RECURSIVE nums (idx) AS ( WITH RECURSIVE nums (idx) AS (
SELECT 1 AS idx SELECT 1 AS idx
@ -150,7 +188,7 @@ current_pages AS (
document_id, document_id,
user_id, user_id,
pages pages
FROM activity FROM raw_activity
GROUP BY document_id, user_id GROUP BY document_id, user_id
HAVING MAX(start_time) HAVING MAX(start_time)
ORDER BY start_time DESC ORDER BY start_time DESC
@ -158,33 +196,33 @@ current_pages AS (
intermediate AS ( intermediate AS (
SELECT SELECT
activity.document_id, raw_activity.document_id,
activity.device_id, raw_activity.device_id,
activity.user_id, raw_activity.user_id,
activity.start_time, raw_activity.created_at,
activity.duration, raw_activity.start_time,
activity.page, raw_activity.duration,
raw_activity.page,
current_pages.pages, current_pages.pages,
-- Derive first page -- Derive first page
((activity.page - 1) * current_pages.pages) / activity.pages ((raw_activity.page - 1) * current_pages.pages) / raw_activity.pages
+ 1 AS first_page, + 1 AS first_page,
-- Derive last page -- Derive last page
MAX( MAX(
((activity.page - 1) * current_pages.pages) ((raw_activity.page - 1) * current_pages.pages)
/ activity.pages / raw_activity.pages
+ 1, + 1,
(activity.page * current_pages.pages) / activity.pages (raw_activity.page * current_pages.pages) / raw_activity.pages
) AS last_page ) AS last_page
FROM activity FROM raw_activity
INNER JOIN current_pages ON INNER JOIN current_pages ON
current_pages.document_id = activity.document_id current_pages.document_id = raw_activity.document_id
AND current_pages.user_id = activity.user_id AND current_pages.user_id = raw_activity.user_id
), ),
-- Improves performance
num_limit AS ( num_limit AS (
SELECT * FROM nums SELECT * FROM nums
LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate) LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate)
@ -192,28 +230,30 @@ num_limit AS (
rescaled_raw AS ( rescaled_raw AS (
SELECT SELECT
document_id, intermediate.document_id,
device_id, intermediate.device_id,
user_id, intermediate.user_id,
start_time, intermediate.created_at,
last_page, intermediate.start_time,
pages, intermediate.last_page,
first_page + num_limit.idx - 1 AS page, intermediate.pages,
duration / ( intermediate.first_page + num_limit.idx - 1 AS page,
last_page - first_page + 1.0 intermediate.duration / (
intermediate.last_page - intermediate.first_page + 1.0
) AS duration ) AS duration
FROM intermediate FROM intermediate
JOIN num_limit ON LEFT JOIN num_limit ON
num_limit.idx <= (last_page - first_page + 1) num_limit.idx <= (intermediate.last_page - intermediate.first_page + 1)
) )
SELECT SELECT
user_id,
document_id, document_id,
device_id, device_id,
user_id, created_at,
start_time, start_time,
pages,
page, page,
pages,
-- Round up if last page (maintains total duration) -- Round up if last page (maintains total duration)
CAST(CASE CAST(CASE
@ -222,3 +262,140 @@ SELECT
ELSE duration ELSE duration
END AS INTEGER) AS duration END AS INTEGER) AS duration
FROM rescaled_raw; FROM rescaled_raw;
--------------------------------
--------- User Streaks ---------
--------------------------------
CREATE VIEW IF NOT EXISTS view_user_streaks AS
WITH document_windows AS (
SELECT
activity.user_id,
users.time_offset,
DATE(
activity.start_time,
users.time_offset,
'weekday 0', '-7 day'
) AS weekly_read,
DATE(activity.start_time, users.time_offset) AS daily_read
FROM raw_activity AS activity
LEFT JOIN users ON users.id = activity.user_id
GROUP BY activity.user_id, weekly_read, daily_read
),
weekly_partitions AS (
SELECT
user_id,
time_offset,
'WEEK' AS "window",
weekly_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY weekly_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, weekly_read
),
daily_partitions AS (
SELECT
user_id,
time_offset,
'DAY' AS "window",
daily_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY daily_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, daily_read
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
time_offset
FROM daily_partitions
GROUP BY
time_offset,
user_id,
DATE(read_window, '+' || seqnum || ' day')
UNION ALL
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
time_offset
FROM weekly_partitions
GROUP BY
time_offset,
user_id,
DATE(read_window, '+' || (seqnum * 7) || ' day')
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date,
window,
user_id
FROM streaks
GROUP BY user_id, window
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date,
window,
user_id
FROM streaks
WHERE CASE
WHEN window = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN window = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
GROUP BY user_id, window
)
SELECT
max_streak.user_id,
max_streak.window,
IFNULL(max_streak, 0) AS max_streak,
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON
current_streak.user_id = max_streak.user_id
AND current_streak.window = max_streak.window;
---------------------------------------------------------------
------------------ Populate Temporary Tables ------------------
---------------------------------------------------------------
INSERT INTO activity SELECT * FROM view_rescaled_activity;
INSERT INTO user_streaks SELECT * FROM view_user_streaks;
---------------------------------------------------------------
--------------------------- Triggers --------------------------
---------------------------------------------------------------
-- Update Trigger
CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
BEFORE UPDATE ON documents BEGIN
UPDATE documents
SET updated_at = CURRENT_TIMESTAMP
WHERE id = old.id;
END;

View File

@ -0,0 +1,4 @@
-- Refresh script: clear and re-populate the cached tables from
-- their backing views. Presumably run periodically (or after
-- bulk writes) to keep the materialized copies in sync —
-- confirm the scheduling against the caller.
DELETE FROM activity;
INSERT INTO activity SELECT * FROM view_rescaled_activity;
DELETE FROM user_streaks;
INSERT INTO user_streaks SELECT * FROM view_user_streaks;

View File

@ -151,6 +151,7 @@
</div> </div>
<div class="grid grid-cols-1 gap-4 my-4 md:grid-cols-2 lg:grid-cols-3"> <div class="grid grid-cols-1 gap-4 my-4 md:grid-cols-2 lg:grid-cols-3">
{{ range $item := .Data.Streaks }}
<div class="w-full"> <div class="w-full">
<div <div
class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded" class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
@ -158,11 +159,12 @@
<p <p
class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500" class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
> >
Daily Read Streak {{ if eq $item.Window "WEEK" }} Weekly Read Streak {{ else }} Daily Read
Streak {{ end }}
</p> </p>
<div class="flex items-end my-6 space-x-2"> <div class="flex items-end my-6 space-x-2">
<p class="text-5xl font-bold text-black dark:text-white"> <p class="text-5xl font-bold text-black dark:text-white">
{{ .Data.DailyStreak.CurrentStreak }} {{ $item.CurrentStreak }}
</p> </p>
</div> </div>
<div class="dark:text-white"> <div class="dark:text-white">
@ -170,76 +172,33 @@
class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200" class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
> >
<div> <div>
<p>Current Daily Streak</p> <p>
{{ if eq $item.Window "WEEK" }} Current Weekly Streak {{ else }}
Current Daily Streak {{ end }}
</p>
<div class="flex items-end text-sm text-gray-400"> <div class="flex items-end text-sm text-gray-400">
{{ .Data.DailyStreak.CurrentStreakStartDate }} ➞ {{ {{ $item.CurrentStreakStartDate }} ➞ {{ $item.CurrentStreakEndDate
.Data.DailyStreak.CurrentStreakEndDate }} }}
</div> </div>
</div> </div>
<div class="flex items-end font-bold"> <div class="flex items-end font-bold">{{ $item.CurrentStreak }}</div>
{{ .Data.DailyStreak.CurrentStreak }}
</div>
</div> </div>
<div class="flex items-center justify-between pb-2 mb-2 text-sm"> <div class="flex items-center justify-between pb-2 mb-2 text-sm">
<div> <div>
<p>Best Daily Streak</p> <p>
{{ if eq $item.Window "WEEK" }} Best Weekly Streak {{ else }} Best
Daily Streak {{ end }}
</p>
<div class="flex items-end text-sm text-gray-400"> <div class="flex items-end text-sm text-gray-400">
{{ .Data.DailyStreak.MaxStreakStartDate }} ➞ {{ {{ $item.MaxStreakStartDate }} ➞ {{ $item.MaxStreakEndDate }}
.Data.DailyStreak.MaxStreakEndDate }}
</div> </div>
</div> </div>
<div class="flex items-end font-bold"> <div class="flex items-end font-bold">{{ $item.MaxStreak }}</div>
{{ .Data.DailyStreak.MaxStreak }}
</div>
</div>
</div>
</div>
</div>
<div class="w-full">
<div
class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
>
<p
class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
>
Weekly Read Streak
</p>
<div class="flex items-end my-6 space-x-2">
<p class="text-5xl font-bold text-black dark:text-white">
{{ .Data.WeeklyStreak.CurrentStreak }}
</p>
</div>
<div class="dark:text-white">
<div
class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
>
<div>
<p>Current Weekly Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.WeeklyStreak.CurrentStreakStartDate }} ➞ {{
.Data.WeeklyStreak.CurrentStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
{{ .Data.WeeklyStreak.CurrentStreak }}
</div>
</div>
<div class="flex items-center justify-between pb-2 mb-2 text-sm">
<div>
<p>Best Weekly Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.WeeklyStreak.MaxStreakStartDate }} ➞ {{
.Data.WeeklyStreak.MaxStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
{{ .Data.WeeklyStreak.MaxStreak }}
</div>
</div> </div>
</div> </div>
</div> </div>
</div> </div>
{{ end }}
</div> </div>
{{end}} {{end}}

View File

@ -56,6 +56,10 @@ func GetUTCOffsets() []UTCOffset {
} }
func NiceSeconds(input int64) (result string) { func NiceSeconds(input int64) (result string) {
if input == 0 {
return "N/A"
}
days := math.Floor(float64(input) / 60 / 60 / 24) days := math.Floor(float64(input) / 60 / 60 / 24)
seconds := input % (60 * 60 * 24) seconds := input % (60 * 60 * 24)
hours := math.Floor(float64(seconds) / 60 / 60) hours := math.Floor(float64(seconds) / 60 / 60)