Compare commits


1 Commit

12 changed files with 354 additions and 386 deletions

View File

@ -8,6 +8,7 @@ import (
"os"
"path/filepath"
"strings"
"time"
argon2 "github.com/alexedwards/argon2id"
"github.com/gabriel-vasile/mimetype"
@ -129,7 +130,7 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
"WordsPerMinute": "N/A",
}
if document.Words != nil && *document.Words != 0 && document.TotalTimeSeconds != 0 {
if document.Words != nil && *document.Words != 0 {
statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
}
@ -157,13 +158,37 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
templateVars["Data"] = activity
} else if routeName == "home" {
start_time := time.Now()
weekly_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
UserID: userID,
Window: "WEEK",
})
if err != nil {
log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
}
log.Debug("GetUserWindowStreaks - WEEK - ", time.Since(start_time))
start_time = time.Now()
streaks, _ := api.DB.Queries.GetUserStreaks(api.DB.Ctx, userID)
daily_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
UserID: userID,
Window: "DAY",
})
if err != nil {
log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
}
log.Debug("GetUserWindowStreaks - DAY - ", time.Since(start_time))
start_time = time.Now()
database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID)
log.Debug("GetDatabaseInfo - ", time.Since(start_time))
start_time = time.Now()
read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID)
log.Debug("GetDailyReadStats - ", time.Since(start_time))
templateVars["Data"] = gin.H{
"Streaks": streaks,
"DailyStreak": daily_streak,
"WeeklyStreak": weekly_streak,
"DatabaseInfo": database_info,
"GraphData": read_graph_data,
}

View File

@ -277,11 +277,6 @@ func (api *API) addActivities(c *gin.Context) {
return
}
// Update Temp Tables
if err := api.DB.CacheTempTables(); err != nil {
log.Warn("[addActivities] CacheTempTables Failure: ", err)
}
c.JSON(http.StatusOK, gin.H{
"added": len(rActivity.Activity),
})

book_manager.db (new file)
View File

View File

@ -75,7 +75,7 @@ local STATISTICS_ACTIVITY_SINCE_QUERY = [[
JOIN book AS b
ON b.id = psd.id_book
WHERE start_time > %d
ORDER BY start_time ASC LIMIT 5000;
ORDER BY start_time ASC LIMIT 1000;
]]
local STATISTICS_BOOK_QUERY = [[
@ -907,7 +907,7 @@ function SyncNinja:getStatisticsActivity(timestamp)
local conn = SQ3.open(statistics_db)
local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY,
timestamp))
local rows = stmt:resultset("i", 5000)
local rows = stmt:resultset("i", 1000)
conn:close()
-- No Results

View File

@ -21,9 +21,6 @@ type DBManager struct {
//go:embed schema.sql
var ddl string
//go:embed update_temp_tables.sql
var tsql string
func NewMgr(c *config.Config) *DBManager {
// Create Manager
dbm := &DBManager{
@ -47,23 +44,22 @@ func NewMgr(c *config.Config) *DBManager {
log.Fatal("Unsupported Database")
}
// Create Tables
if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil {
log.Fatal(err)
}
dbm.Queries = New(dbm.DB)
return dbm
}
func (dbm *DBManager) CacheTempTables() error {
if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil {
func connectHookSQLite(conn *sqlite.SQLiteConn) error {
if err := conn.RegisterFunc("test_func", func() string {
return "FOOBAR"
}, false); err != nil {
log.Info("Error Registering Function")
return err
}
return nil
}
func connectHookSQLite(conn *sqlite.SQLiteConn) error {
// Create Tables
log.Debug("Creating Schema")
if _, err := conn.Exec(ddl, nil); err != nil {
log.Warn("Create Schema Failure: ", err)
}
return nil
}
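With CacheTempTables gone, the schema DDL and the custom SQLite function are now applied inside connectHookSQLite, i.e. once per new connection. The driver registration that attaches the hook is not part of this hunk; a minimal sketch of the usual mattn/go-sqlite3 wiring (driver name, import path, and the openDB helper are assumptions, not code from this commit):

import (
	"database/sql"

	sqlite "github.com/mattn/go-sqlite3"
)

// Register (once) a driver whose ConnectHook runs connectHookSQLite, so the
// schema and custom functions are in place before any query is executed.
func init() {
	sql.Register("sqlite3_hooked", &sqlite.SQLiteDriver{
		ConnectHook: connectHookSQLite,
	})
}

func openDB(dbPath string) (*sql.DB, error) {
	return sql.Open("sqlite3_hooked", dbPath)
}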

View File

@ -9,14 +9,15 @@ import (
)
type Activity struct {
ID int64 `json:"id"`
UserID string `json:"user_id"`
DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"`
CreatedAt time.Time `json:"created_at"`
StartTime time.Time `json:"start_time"`
Duration int64 `json:"duration"`
Page int64 `json:"page"`
Pages int64 `json:"pages"`
Duration int64 `json:"duration"`
CreatedAt time.Time `json:"created_at"`
}
type Device struct {
@ -79,16 +80,14 @@ type Metadatum struct {
CreatedAt time.Time `json:"created_at"`
}
type RawActivity struct {
ID int64 `json:"id"`
UserID string `json:"user_id"`
type RescaledActivity struct {
DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"`
UserID string `json:"user_id"`
StartTime time.Time `json:"start_time"`
Page int64 `json:"page"`
Pages int64 `json:"pages"`
Page int64 `json:"page"`
Duration int64 `json:"duration"`
CreatedAt time.Time `json:"created_at"`
}
type User struct {
@ -98,36 +97,3 @@ type User struct {
TimeOffset *string `json:"time_offset"`
CreatedAt time.Time `json:"created_at"`
}
type UserStreak struct {
UserID string `json:"user_id"`
Window string `json:"window"`
MaxStreak int64 `json:"max_streak"`
MaxStreakStartDate string `json:"max_streak_start_date"`
MaxStreakEndDate string `json:"max_streak_end_date"`
CurrentStreak int64 `json:"current_streak"`
CurrentStreakStartDate string `json:"current_streak_start_date"`
CurrentStreakEndDate string `json:"current_streak_end_date"`
}
type ViewRescaledActivity struct {
UserID string `json:"user_id"`
DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"`
CreatedAt time.Time `json:"created_at"`
StartTime time.Time `json:"start_time"`
Page int64 `json:"page"`
Pages int64 `json:"pages"`
Duration int64 `json:"duration"`
}
type ViewUserStreak struct {
UserID string `json:"user_id"`
Window string `json:"window"`
MaxStreak interface{} `json:"max_streak"`
MaxStreakStartDate interface{} `json:"max_streak_start_date"`
MaxStreakEndDate interface{} `json:"max_streak_end_date"`
CurrentStreak interface{} `json:"current_streak"`
CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
CurrentStreakEndDate interface{} `json:"current_streak_end_date"`
}

View File

@ -135,7 +135,7 @@ AND user_id = $user_id
ORDER BY start_time DESC LIMIT 1;
-- name: AddActivity :one
INSERT INTO raw_activity (
INSERT INTO activity (
user_id,
document_id,
device_id,
@ -200,7 +200,7 @@ WITH true_progress AS (
-- Derive Percentage of Book
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
FROM activity
FROM rescaled_activity
WHERE user_id = $user_id
AND document_id = $document_id
GROUP BY document_id
@ -291,7 +291,7 @@ OFFSET $offset;
-- name: GetActivity :many
SELECT
document_id,
CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
title,
author,
duration,
@ -303,12 +303,11 @@ LEFT JOIN users ON users.id = activity.user_id
WHERE
activity.user_id = $user_id
AND (
(
CAST($doc_filter AS BOOLEAN) = TRUE
AND document_id = $document_id
) OR $doc_filter = FALSE
)
ORDER BY activity.start_time DESC
OR $doc_filter = FALSE
ORDER BY start_time DESC
LIMIT $limit
OFFSET $offset;
@ -327,7 +326,7 @@ GROUP BY activity.device_id;
SELECT
COUNT(DISTINCT page) AS pages_read,
SUM(duration) AS total_time
FROM activity
FROM rescaled_activity
WHERE document_id = $document_id
AND user_id = $user_id
AND start_time >= $start_time;
@ -335,7 +334,7 @@ AND start_time >= $start_time;
-- name: GetDocumentReadStatsCapped :one
WITH capped_stats AS (
SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations
FROM activity
FROM rescaled_activity
WHERE document_id = $document_id
AND user_id = $user_id
AND start_time >= $start_time
@ -358,9 +357,77 @@ WITH document_days AS (
SELECT CAST(COUNT(*) AS INTEGER) AS days_read
FROM document_days;
-- name: GetUserStreaks :many
SELECT * FROM user_streaks
WHERE user_id = $user_id;
-- name: GetUserWindowStreaks :one
WITH document_windows AS (
SELECT
CASE
WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
END AS read_window,
time_offset
FROM activity
JOIN users ON users.id = activity.user_id
WHERE user_id = $user_id
AND CAST($window AS TEXT) = CAST($window AS TEXT)
GROUP BY read_window
),
partitions AS (
SELECT
document_windows.*,
row_number() OVER (
PARTITION BY 1 ORDER BY read_window DESC
) AS seqnum
FROM document_windows
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
time_offset
FROM partitions
GROUP BY
CASE
WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END,
time_offset
ORDER BY end_date DESC
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
LIMIT 1
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date
FROM streaks
WHERE CASE
WHEN ?2 = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN ?2 = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
LIMIT 1
)
SELECT
CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON 1 = 1
LIMIT 1;
-- name: GetDatabaseInfo :one
SELECT

View File

@ -13,7 +13,7 @@ import (
)
const addActivity = `-- name: AddActivity :one
INSERT INTO raw_activity (
INSERT INTO activity (
user_id,
document_id,
device_id,
@ -23,7 +23,7 @@ INSERT INTO raw_activity (
pages
)
VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at
RETURNING id, user_id, document_id, device_id, start_time, duration, page, pages, created_at
`
type AddActivityParams struct {
@ -36,7 +36,7 @@ type AddActivityParams struct {
Pages int64 `json:"pages"`
}
func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) {
func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) {
row := q.db.QueryRowContext(ctx, addActivity,
arg.UserID,
arg.DocumentID,
@ -46,16 +46,16 @@ func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawAc
arg.Page,
arg.Pages,
)
var i RawActivity
var i Activity
err := row.Scan(
&i.ID,
&i.UserID,
&i.DocumentID,
&i.DeviceID,
&i.StartTime,
&i.Duration,
&i.Page,
&i.Pages,
&i.Duration,
&i.CreatedAt,
)
return i, err
@ -151,7 +151,7 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error)
const getActivity = `-- name: GetActivity :many
SELECT
document_id,
CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
title,
author,
duration,
@ -163,12 +163,11 @@ LEFT JOIN users ON users.id = activity.user_id
WHERE
activity.user_id = ?1
AND (
(
CAST(?2 AS BOOLEAN) = TRUE
AND document_id = ?3
) OR ?2 = FALSE
)
ORDER BY activity.start_time DESC
OR ?2 = FALSE
ORDER BY start_time DESC
LIMIT ?5
OFFSET ?4
`
@ -478,7 +477,7 @@ const getDocumentReadStats = `-- name: GetDocumentReadStats :one
SELECT
COUNT(DISTINCT page) AS pages_read,
SUM(duration) AS total_time
FROM activity
FROM rescaled_activity
WHERE document_id = ?1
AND user_id = ?2
AND start_time >= ?3
@ -505,7 +504,7 @@ func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadS
const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one
WITH capped_stats AS (
SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations
FROM activity
FROM rescaled_activity
WHERE document_id = ?2
AND user_id = ?3
AND start_time >= ?4
@ -555,7 +554,7 @@ WITH true_progress AS (
-- Derive Percentage of Book
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
FROM activity
FROM rescaled_activity
WHERE user_id = ?1
AND document_id = ?2
GROUP BY document_id
@ -979,41 +978,105 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
return i, err
}
const getUserStreaks = `-- name: GetUserStreaks :many
SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one
WITH document_windows AS (
SELECT
CASE
WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
END AS read_window,
time_offset
FROM activity
JOIN users ON users.id = activity.user_id
WHERE user_id = ?1
AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT)
GROUP BY read_window
),
partitions AS (
SELECT
document_windows.read_window, document_windows.time_offset,
row_number() OVER (
PARTITION BY 1 ORDER BY read_window DESC
) AS seqnum
FROM document_windows
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
time_offset
FROM partitions
GROUP BY
CASE
WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END,
time_offset
ORDER BY end_date DESC
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
LIMIT 1
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date
FROM streaks
WHERE CASE
WHEN ?2 = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN ?2 = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
LIMIT 1
)
SELECT
CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON 1 = 1
LIMIT 1
`
func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) {
rows, err := q.db.QueryContext(ctx, getUserStreaks, userID)
if err != nil {
return nil, err
type GetUserWindowStreaksParams struct {
UserID string `json:"user_id"`
Window string `json:"window"`
}
defer rows.Close()
var items []UserStreak
for rows.Next() {
var i UserStreak
if err := rows.Scan(
&i.UserID,
&i.Window,
type GetUserWindowStreaksRow struct {
MaxStreak int64 `json:"max_streak"`
MaxStreakStartDate string `json:"max_streak_start_date"`
MaxStreakEndDate string `json:"max_streak_end_date"`
CurrentStreak interface{} `json:"current_streak"`
CurrentStreakStartDate string `json:"current_streak_start_date"`
CurrentStreakEndDate string `json:"current_streak_end_date"`
}
func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) {
row := q.db.QueryRowContext(ctx, getUserWindowStreaks, arg.UserID, arg.Window)
var i GetUserWindowStreaksRow
err := row.Scan(
&i.MaxStreak,
&i.MaxStreakStartDate,
&i.MaxStreakEndDate,
&i.CurrentStreak,
&i.CurrentStreakStartDate,
&i.CurrentStreakEndDate,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
)
return i, err
}
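// Sketch only, not generated code. Two illustrative notes on this query:
//
// 1) The streak grouping relies on adding the descending row number to each
//    read window: members of an unbroken run all land on the same date, e.g.
//    for DAY windows 2023-10-05, 2023-10-04, 2023-10-02 (seqnum 1, 2, 3):
//      DATE('2023-10-05', '+1 day') = 2023-10-06
//      DATE('2023-10-04', '+2 day') = 2023-10-06  -> same group, streak of 2
//      DATE('2023-10-02', '+3 day') = 2023-10-05  -> new group, streak of 1
//
// 2) Calling it mirrors the home route earlier in this compare; the user ID
//    below is hypothetical.
func exampleUserStreaks(ctx context.Context, q *Queries) (daily, weekly GetUserWindowStreaksRow, err error) {
	daily, err = q.GetUserWindowStreaks(ctx, GetUserWindowStreaksParams{UserID: "user-123", Window: "DAY"})
	if err != nil {
		return
	}
	weekly, err = q.GetUserWindowStreaks(ctx, GetUserWindowStreaksParams{UserID: "user-123", Window: "WEEK"})
	return
}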
const getUsers = `-- name: GetUsers :many

View File

@ -1,10 +1,6 @@
PRAGMA foreign_keys = ON;
PRAGMA journal_mode = WAL;
---------------------------------------------------------------
------------------------ Normal Tables ------------------------
---------------------------------------------------------------
-- Authentication
CREATE TABLE IF NOT EXISTS users (
id TEXT NOT NULL PRIMARY KEY,
@ -105,17 +101,17 @@ CREATE TABLE IF NOT EXISTS document_progress (
PRIMARY KEY (user_id, document_id, device_id)
);
-- Raw Read Activity
CREATE TABLE IF NOT EXISTS raw_activity (
-- Read Activity
CREATE TABLE IF NOT EXISTS activity (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT NOT NULL,
document_id TEXT NOT NULL,
device_id TEXT NOT NULL,
start_time DATETIME NOT NULL,
duration INTEGER NOT NULL,
page INTEGER NOT NULL,
pages INTEGER NOT NULL,
duration INTEGER NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id),
@ -123,57 +119,23 @@ CREATE TABLE IF NOT EXISTS raw_activity (
FOREIGN KEY (device_id) REFERENCES devices (id)
);
---------------------------------------------------------------
----------------------- Temporary Tables ----------------------
---------------------------------------------------------------
-- Temporary Activity Table (Cached from View)
CREATE TEMPORARY TABLE IF NOT EXISTS activity (
user_id TEXT NOT NULL,
document_id TEXT NOT NULL,
device_id TEXT NOT NULL,
created_at DATETIME NOT NULL,
start_time DATETIME NOT NULL,
page INTEGER NOT NULL,
pages INTEGER NOT NULL,
duration INTEGER NOT NULL
);
-- Temporary User Streaks Table (Cached from View)
CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
user_id TEXT NOT NULL,
window TEXT NOT NULL,
max_streak INTEGER NOT NULL,
max_streak_start_date TEXT NOT NULL,
max_streak_end_date TEXT NOT NULL,
current_streak INTEGER NOT NULL,
current_streak_start_date TEXT NOT NULL,
current_streak_end_date TEXT NOT NULL
);
---------------------------------------------------------------
--------------------------- Indexes ---------------------------
---------------------------------------------------------------
CREATE INDEX IF NOT EXISTS temp.activity_start_time ON activity (start_time);
CREATE INDEX IF NOT EXISTS temp.activity_user_id ON activity (user_id);
CREATE INDEX IF NOT EXISTS temp.activity_user_id_document_id ON activity (
-- Indexes
CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time);
CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
user_id,
document_id
);
---------------------------------------------------------------
---------------------------- Views ----------------------------
---------------------------------------------------------------
-- Update Trigger
CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
BEFORE UPDATE ON documents BEGIN
UPDATE documents
SET updated_at = CURRENT_TIMESTAMP
WHERE id = old.id;
END;
--------------------------------
------- Rescaled Activity ------
--------------------------------
CREATE VIEW IF NOT EXISTS view_rescaled_activity AS
-- Rescaled Activity View (Adapted from KOReader)
CREATE VIEW IF NOT EXISTS rescaled_activity AS
WITH RECURSIVE nums (idx) AS (
SELECT 1 AS idx
@ -188,7 +150,7 @@ current_pages AS (
document_id,
user_id,
pages
FROM raw_activity
FROM activity
GROUP BY document_id, user_id
HAVING MAX(start_time)
ORDER BY start_time DESC
@ -196,33 +158,33 @@ current_pages AS (
intermediate AS (
SELECT
raw_activity.document_id,
raw_activity.device_id,
raw_activity.user_id,
raw_activity.created_at,
raw_activity.start_time,
raw_activity.duration,
raw_activity.page,
activity.document_id,
activity.device_id,
activity.user_id,
activity.start_time,
activity.duration,
activity.page,
current_pages.pages,
-- Derive first page
((raw_activity.page - 1) * current_pages.pages) / raw_activity.pages
((activity.page - 1) * current_pages.pages) / activity.pages
+ 1 AS first_page,
-- Derive last page
MAX(
((raw_activity.page - 1) * current_pages.pages)
/ raw_activity.pages
((activity.page - 1) * current_pages.pages)
/ activity.pages
+ 1,
(raw_activity.page * current_pages.pages) / raw_activity.pages
(activity.page * current_pages.pages) / activity.pages
) AS last_page
FROM raw_activity
FROM activity
INNER JOIN current_pages ON
current_pages.document_id = raw_activity.document_id
AND current_pages.user_id = raw_activity.user_id
current_pages.document_id = activity.document_id
AND current_pages.user_id = activity.user_id
),
-- Improves performance
num_limit AS (
SELECT * FROM nums
LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate)
@ -230,30 +192,28 @@ num_limit AS (
rescaled_raw AS (
SELECT
intermediate.document_id,
intermediate.device_id,
intermediate.user_id,
intermediate.created_at,
intermediate.start_time,
intermediate.last_page,
intermediate.pages,
intermediate.first_page + num_limit.idx - 1 AS page,
intermediate.duration / (
intermediate.last_page - intermediate.first_page + 1.0
document_id,
device_id,
user_id,
start_time,
last_page,
pages,
first_page + num_limit.idx - 1 AS page,
duration / (
last_page - first_page + 1.0
) AS duration
FROM intermediate
LEFT JOIN num_limit ON
num_limit.idx <= (intermediate.last_page - intermediate.first_page + 1)
JOIN num_limit ON
num_limit.idx <= (last_page - first_page + 1)
)
SELECT
user_id,
document_id,
device_id,
created_at,
user_id,
start_time,
page,
pages,
page,
-- Round up if last page (maintains total duration)
CAST(CASE
@ -262,140 +222,3 @@ SELECT
ELSE duration
END AS INTEGER) AS duration
FROM rescaled_raw;
--------------------------------
--------- User Streaks ---------
--------------------------------
CREATE VIEW IF NOT EXISTS view_user_streaks AS
WITH document_windows AS (
SELECT
activity.user_id,
users.time_offset,
DATE(
activity.start_time,
users.time_offset,
'weekday 0', '-7 day'
) AS weekly_read,
DATE(activity.start_time, users.time_offset) AS daily_read
FROM raw_activity AS activity
LEFT JOIN users ON users.id = activity.user_id
GROUP BY activity.user_id, weekly_read, daily_read
),
weekly_partitions AS (
SELECT
user_id,
time_offset,
'WEEK' AS "window",
weekly_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY weekly_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, weekly_read
),
daily_partitions AS (
SELECT
user_id,
time_offset,
'DAY' AS "window",
daily_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY daily_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, daily_read
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
time_offset
FROM daily_partitions
GROUP BY
time_offset,
user_id,
DATE(read_window, '+' || seqnum || ' day')
UNION ALL
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
time_offset
FROM weekly_partitions
GROUP BY
time_offset,
user_id,
DATE(read_window, '+' || (seqnum * 7) || ' day')
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date,
window,
user_id
FROM streaks
GROUP BY user_id, window
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date,
window,
user_id
FROM streaks
WHERE CASE
WHEN window = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN window = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
GROUP BY user_id, window
)
SELECT
max_streak.user_id,
max_streak.window,
IFNULL(max_streak, 0) AS max_streak,
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON
current_streak.user_id = max_streak.user_id
AND current_streak.window = max_streak.window;
---------------------------------------------------------------
------------------ Populate Temporary Tables ------------------
---------------------------------------------------------------
INSERT INTO activity SELECT * FROM view_rescaled_activity;
INSERT INTO user_streaks SELECT * FROM view_user_streaks;
---------------------------------------------------------------
--------------------------- Triggers --------------------------
---------------------------------------------------------------
-- Update Trigger
CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
BEFORE UPDATE ON documents BEGIN
UPDATE documents
SET updated_at = CURRENT_TIMESTAMP
WHERE id = old.id;
END;
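The rescaled_activity view above replaces the old temporary activity table: each raw row's page, recorded against the device's page count, is mapped onto the document's current page count, and the duration is split evenly across the rescaled page range (the view also rounds the last page up so the total duration is preserved). A minimal Go sketch of that per-row arithmetic, with hypothetical values:

package main

import "fmt"

func main() {
	page, rawPages := int64(3), int64(50) // device reported page 3 of 50
	currentPages := int64(200)            // document's current page count
	duration := int64(60)                 // seconds spent on that raw page

	firstPage := ((page-1)*currentPages)/rawPages + 1 // 9
	lastPage := (page * currentPages) / rawPages      // 12
	if lastPage < firstPage {
		lastPage = firstPage
	}

	span := lastPage - firstPage + 1             // 4 rescaled pages
	perPage := float64(duration) / float64(span) // 15s each
	for p := firstPage; p <= lastPage; p++ {
		fmt.Printf("page %d: %.1fs\n", p, perPage)
	}
}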

View File

@ -1,4 +0,0 @@
DELETE FROM activity;
INSERT INTO activity SELECT * FROM view_rescaled_activity;
DELETE FROM user_streaks;
INSERT INTO user_streaks SELECT * FROM view_user_streaks;

View File

@ -151,7 +151,6 @@
</div>
<div class="grid grid-cols-1 gap-4 my-4 md:grid-cols-2 lg:grid-cols-3">
{{ range $item := .Data.Streaks }}
<div class="w-full">
<div
class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
@ -159,12 +158,11 @@
<p
class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
>
{{ if eq $item.Window "WEEK" }} Weekly Read Streak {{ else }} Daily Read
Streak {{ end }}
Daily Read Streak
</p>
<div class="flex items-end my-6 space-x-2">
<p class="text-5xl font-bold text-black dark:text-white">
{{ $item.CurrentStreak }}
{{ .Data.DailyStreak.CurrentStreak }}
</p>
</div>
<div class="dark:text-white">
@ -172,33 +170,76 @@
class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
>
<div>
<p>
{{ if eq $item.Window "WEEK" }} Current Weekly Streak {{ else }}
Current Daily Streak {{ end }}
</p>
<p>Current Daily Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ $item.CurrentStreakStartDate }} ➞ {{ $item.CurrentStreakEndDate
}}
{{ .Data.DailyStreak.CurrentStreakStartDate }} ➞ {{
.Data.DailyStreak.CurrentStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">{{ $item.CurrentStreak }}</div>
<div class="flex items-end font-bold">
{{ .Data.DailyStreak.CurrentStreak }}
</div>
</div>
<div class="flex items-center justify-between pb-2 mb-2 text-sm">
<div>
<p>
{{ if eq $item.Window "WEEK" }} Best Weekly Streak {{ else }} Best
Daily Streak {{ end }}
</p>
<p>Best Daily Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ $item.MaxStreakStartDate }} ➞ {{ $item.MaxStreakEndDate }}
{{ .Data.DailyStreak.MaxStreakStartDate }} ➞ {{
.Data.DailyStreak.MaxStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">{{ $item.MaxStreak }}</div>
<div class="flex items-end font-bold">
{{ .Data.DailyStreak.MaxStreak }}
</div>
</div>
</div>
</div>
</div>
<div class="w-full">
<div
class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
>
<p
class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
>
Weekly Read Streak
</p>
<div class="flex items-end my-6 space-x-2">
<p class="text-5xl font-bold text-black dark:text-white">
{{ .Data.WeeklyStreak.CurrentStreak }}
</p>
</div>
<div class="dark:text-white">
<div
class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
>
<div>
<p>Current Weekly Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.WeeklyStreak.CurrentStreakStartDate }} ➞ {{
.Data.WeeklyStreak.CurrentStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
{{ .Data.WeeklyStreak.CurrentStreak }}
</div>
</div>
<div class="flex items-center justify-between pb-2 mb-2 text-sm">
<div>
<p>Best Weekly Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.WeeklyStreak.MaxStreakStartDate }} ➞ {{
.Data.WeeklyStreak.MaxStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
{{ .Data.WeeklyStreak.MaxStreak }}
</div>
</div>
</div>
</div>
</div>
{{ end }}
</div>
{{end}}

View File

@ -56,10 +56,6 @@ func GetUTCOffsets() []UTCOffset {
}
func NiceSeconds(input int64) (result string) {
if input == 0 {
return "N/A"
}
days := math.Floor(float64(input) / 60 / 60 / 24)
seconds := input % (60 * 60 * 24)
hours := math.Floor(float64(seconds) / 60 / 60)
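For reference, the visible lines of NiceSeconds break the input into days and hours (the rest of the function is outside this hunk). A quick walkthrough with a hypothetical input:

package main

import (
	"fmt"
	"math"
)

func main() {
	// 93,784 seconds = 1 day, 2 hours, 3 minutes, 4 seconds
	input := int64(93784)
	days := math.Floor(float64(input) / 60 / 60 / 24) // 1
	rem := input % (60 * 60 * 24)                     // 7384 seconds remaining
	hours := math.Floor(float64(rem) / 60 / 60)       // 2
	fmt.Println(days, hours)
}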