Compare commits
2 Commits: 91bd25b5b8 ... 317a1e3145

Author | SHA1 | Date
---|---|---
 | 317a1e3145 |
 | 5cd4e165b0 |
@@ -8,7 +8,6 @@ import (
 	"os"
 	"path/filepath"
 	"strings"
-	"time"

 	argon2 "github.com/alexedwards/argon2id"
 	"github.com/gabriel-vasile/mimetype"

@@ -126,12 +125,12 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
 	}

 	statistics := gin.H{
-		"TotalTimeLeftSeconds": (document.TotalPages - document.CurrentPage) * document.SecondsPerPage,
+		"TotalTimeLeftSeconds": (document.Pages - document.Page) * document.SecondsPerPage,
 		"WordsPerMinute": "N/A",
 	}

-	if document.Words != nil && *document.Words != 0 {
-		statistics["WordsPerMinute"] = (*document.Words / document.TotalPages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
+	if document.Words != nil && *document.Words != 0 && document.TotalTimeSeconds != 0 {
+		statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
 	}

 	templateVars["RelBase"] = "../"
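Note (reviewer sketch, not part of the diff): the extra document.TotalTimeSeconds != 0 guard added above prevents a division by zero (or a meaningless value) when a document has a word count but no logged reading time yet. A minimal sketch of the same arithmetic, with simplified float64 types and made-up numbers:

package main

import "fmt"

// wordsPerMinute mirrors the WordsPerMinute statistic above; types and
// sample values are hypothetical, not taken from the project.
func wordsPerMinute(words, pages, readPages, totalTimeSeconds float64) float64 {
	wordsRead := words / pages * readPages       // approx. words actually read
	return wordsRead / (totalTimeSeconds / 60.0) // divided by minutes spent reading
}

func main() {
	// 90,000 words over 300 pages, 150 pages read in 3 hours (10,800 s):
	fmt.Println(wordsPerMinute(90000, 300, 150, 10800)) // prints 250
}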
@@ -158,37 +157,13 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any

 		templateVars["Data"] = activity
 	} else if routeName == "home" {
-		start_time := time.Now()
-		weekly_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
-			UserID: userID,
-			Window: "WEEK",
-		})
-		if err != nil {
-			log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
-		}
-		log.Debug("GetUserWindowStreaks - WEEK - ", time.Since(start_time))
-		start_time = time.Now()

-		daily_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
-			UserID: userID,
-			Window: "DAY",
-		})
-		if err != nil {
-			log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
-		}
-		log.Debug("GetUserWindowStreaks - DAY - ", time.Since(start_time))
+		streaks, _ := api.DB.Queries.GetUserStreaks(api.DB.Ctx, userID)

-		start_time = time.Now()
 		database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID)
-		log.Debug("GetDatabaseInfo - ", time.Since(start_time))

-		start_time = time.Now()
 		read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID)
-		log.Debug("GetDailyReadStats - ", time.Since(start_time))

 		templateVars["Data"] = gin.H{
-			"DailyStreak":  daily_streak,
-			"WeeklyStreak": weekly_streak,
+			"Streaks":      streaks,
 			"DatabaseInfo": database_info,
 			"GraphData":    read_graph_data,
 		}

@@ -513,12 +488,12 @@ func (api *API) identifyDocument(c *gin.Context) {
 	}

 	statistics := gin.H{
-		"TotalTimeLeftSeconds": (document.TotalPages - document.CurrentPage) * document.SecondsPerPage,
+		"TotalTimeLeftSeconds": (document.Pages - document.Page) * document.SecondsPerPage,
 		"WordsPerMinute": "N/A",
 	}

 	if document.Words != nil && *document.Words != 0 {
-		statistics["WordsPerMinute"] = (*document.Words / document.TotalPages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
+		statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
 	}

 	templateVars["Data"] = document
@@ -22,11 +22,11 @@ import (
 )

 type activityItem struct {
 	DocumentID string `json:"document"`
 	StartTime  int64  `json:"start_time"`
 	Duration   int64  `json:"duration"`
-	CurrentPage int64 `json:"current_page"`
-	TotalPages  int64 `json:"total_pages"`
+	Page       int64  `json:"page"`
+	Pages      int64  `json:"pages"`
 }

 type requestActivity struct {

@@ -256,13 +256,13 @@ func (api *API) addActivities(c *gin.Context) {
 	// Add All Activity
 	for _, item := range rActivity.Activity {
 		if _, err := qtx.AddActivity(api.DB.Ctx, database.AddActivityParams{
 			UserID:     rUser.(string),
 			DocumentID: item.DocumentID,
 			DeviceID:   rActivity.DeviceID,
 			StartTime:  time.Unix(int64(item.StartTime), 0).UTC(),
 			Duration:   int64(item.Duration),
-			CurrentPage: int64(item.CurrentPage),
-			TotalPages:  int64(item.TotalPages),
+			Page:       int64(item.Page),
+			Pages:      int64(item.Pages),
 		}); err != nil {
 			log.Error("[addActivities] AddActivity DB Error:", err)
 			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})

@@ -277,6 +277,11 @@ func (api *API) addActivities(c *gin.Context) {
 		return
 	}

+	// Update Temp Tables
+	if err := api.DB.CacheTempTables(); err != nil {
+		log.Warn("[addActivities] CacheTempTables Failure: ", err)
+	}
+
 	c.JSON(http.StatusOK, gin.H{
 		"added": len(rActivity.Activity),
 	})
@@ -75,7 +75,7 @@ local STATISTICS_ACTIVITY_SINCE_QUERY = [[
     JOIN book AS b
     ON b.id = psd.id_book
     WHERE start_time > %d
-    ORDER BY start_time ASC LIMIT 1000;
+    ORDER BY start_time ASC LIMIT 5000;
 ]]

 local STATISTICS_BOOK_QUERY = [[

@@ -907,7 +907,7 @@ function SyncNinja:getStatisticsActivity(timestamp)
     local conn = SQ3.open(statistics_db)
     local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY,
                                             timestamp))
-    local rows = stmt:resultset("i", 1000)
+    local rows = stmt:resultset("i", 5000)
     conn:close()

     -- No Results

@@ -919,8 +919,8 @@ function SyncNinja:getStatisticsActivity(timestamp)
             document = rows[1][i],
             start_time = tonumber(rows[2][i]),
             duration = tonumber(rows[3][i]),
-            current_page = tonumber(rows[4][i]),
-            total_pages = tonumber(rows[5][i])
+            page = tonumber(rows[4][i]),
+            pages = tonumber(rows[5][i])
         })
     end

@@ -21,6 +21,9 @@ type DBManager struct {
 //go:embed schema.sql
 var ddl string

+//go:embed update_temp_tables.sql
+var tsql string
+
 func NewMgr(c *config.Config) *DBManager {
 	// Create Manager
 	dbm := &DBManager{

@@ -44,22 +47,23 @@ func NewMgr(c *config.Config) *DBManager {
 		log.Fatal("Unsupported Database")
 	}

-	// Create Tables
-	if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil {
-		log.Fatal(err)
-	}
-
 	dbm.Queries = New(dbm.DB)

 	return dbm
 }

-func connectHookSQLite(conn *sqlite.SQLiteConn) error {
-	if err := conn.RegisterFunc("test_func", func() string {
-		return "FOOBAR"
-	}, false); err != nil {
-		log.Info("Error Registering Function")
+func (dbm *DBManager) CacheTempTables() error {
+	if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil {
 		return err
 	}
 	return nil
 }
+
+func connectHookSQLite(conn *sqlite.SQLiteConn) error {
+	// Create Tables
+	log.Debug("Creating Schema")
+	if _, err := conn.Exec(ddl, nil); err != nil {
+		log.Warn("Create Schema Failure: ", err)
+	}
+	return nil
+}
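Note (reviewer sketch, not part of the diff): NewMgr no longer executes the schema DDL itself; schema creation moves into the SQLite connect hook, and the new CacheTempTables method runs the embedded update_temp_tables.sql, presumably to refresh the temporary activity and user_streaks tables defined in schema.sql (that SQL file itself is not shown in this diff). The only call site shown is inside addActivities; a hypothetical startup call could look like this:

// Hypothetical startup wiring (assumption; dbm, cfg and this call site are
// illustrative and not part of the change set):
dbm := database.NewMgr(cfg)
if err := dbm.CacheTempTables(); err != nil {
	log.Warn("initial CacheTempTables failure: ", err)
}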
@@ -9,15 +9,14 @@ import (
 )

 type Activity struct {
-	ID          int64     `json:"id"`
-	UserID      string    `json:"user_id"`
-	DocumentID  string    `json:"document_id"`
-	DeviceID    string    `json:"device_id"`
+	UserID     string    `json:"user_id"`
+	DocumentID string    `json:"document_id"`
+	DeviceID   string    `json:"device_id"`
+	CreatedAt  time.Time `json:"created_at"`
 	StartTime  time.Time `json:"start_time"`
-	Duration    int64     `json:"duration"`
-	CurrentPage int64     `json:"current_page"`
-	TotalPages  int64     `json:"total_pages"`
-	CreatedAt   time.Time `json:"created_at"`
+	Page       int64     `json:"page"`
+	Pages      int64     `json:"pages"`
+	Duration   int64     `json:"duration"`
 }

 type Device struct {

@@ -80,13 +79,16 @@ type Metadatum struct {
 	CreatedAt time.Time `json:"created_at"`
 }

-type RescaledActivity struct {
+type RawActivity struct {
+	ID         int64     `json:"id"`
+	UserID     string    `json:"user_id"`
 	DocumentID string    `json:"document_id"`
 	DeviceID   string    `json:"device_id"`
-	UserID     string    `json:"user_id"`
 	StartTime  time.Time `json:"start_time"`
 	Page       int64     `json:"page"`
+	Pages      int64     `json:"pages"`
 	Duration   int64     `json:"duration"`
+	CreatedAt  time.Time `json:"created_at"`
 }

 type User struct {

@@ -96,3 +98,36 @@ type User struct {
 	TimeOffset *string   `json:"time_offset"`
 	CreatedAt  time.Time `json:"created_at"`
 }
+
+type UserStreak struct {
+	UserID                 string `json:"user_id"`
+	Window                 string `json:"window"`
+	MaxStreak              int64  `json:"max_streak"`
+	MaxStreakStartDate     string `json:"max_streak_start_date"`
+	MaxStreakEndDate       string `json:"max_streak_end_date"`
+	CurrentStreak          int64  `json:"current_streak"`
+	CurrentStreakStartDate string `json:"current_streak_start_date"`
+	CurrentStreakEndDate   string `json:"current_streak_end_date"`
+}
+
+type ViewRescaledActivity struct {
+	UserID     string    `json:"user_id"`
+	DocumentID string    `json:"document_id"`
+	DeviceID   string    `json:"device_id"`
+	CreatedAt  time.Time `json:"created_at"`
+	StartTime  time.Time `json:"start_time"`
+	Page       int64     `json:"page"`
+	Pages      int64     `json:"pages"`
+	Duration   int64     `json:"duration"`
+}
+
+type ViewUserStreak struct {
+	UserID                 string      `json:"user_id"`
+	Window                 string      `json:"window"`
+	MaxStreak              interface{} `json:"max_streak"`
+	MaxStreakStartDate     interface{} `json:"max_streak_start_date"`
+	MaxStreakEndDate       interface{} `json:"max_streak_end_date"`
+	CurrentStreak          interface{} `json:"current_streak"`
+	CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
+	CurrentStreakEndDate   interface{} `json:"current_streak_end_date"`
+}
@@ -135,14 +135,14 @@ AND user_id = $user_id
 ORDER BY start_time DESC LIMIT 1;

 -- name: AddActivity :one
-INSERT INTO activity (
+INSERT INTO raw_activity (
     user_id,
     document_id,
     device_id,
     start_time,
     duration,
-    current_page,
-    total_pages
+    page,
+    pages
 )
 VALUES (?, ?, ?, ?, ?, ?, ?)
 RETURNING *;

@@ -192,14 +192,14 @@ WITH true_progress AS (
         start_time AS last_read,
         SUM(duration) AS total_time_seconds,
         document_id,
-        current_page,
-        total_pages,
+        page,
+        pages,

         -- Determine Read Pages
-        COUNT(DISTINCT current_page) AS read_pages,
+        COUNT(DISTINCT page) AS read_pages,

         -- Derive Percentage of Book
-        ROUND(CAST(current_page AS REAL) / CAST(total_pages AS REAL) * 100, 2) AS percentage
+        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
     FROM activity
     WHERE user_id = $user_id
     AND document_id = $document_id

@@ -210,8 +210,8 @@ WITH true_progress AS (
 SELECT
     documents.*,

-    CAST(IFNULL(current_page, 0) AS INTEGER) AS current_page,
-    CAST(IFNULL(total_pages, 0) AS INTEGER) AS total_pages,
+    CAST(IFNULL(page, 0) AS INTEGER) AS page,
+    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
     CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
     CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
     CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages,

@@ -220,15 +220,15 @@ SELECT
     -- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
     -- 2. Divide by Read Pages (Distinct Pages in Activity)
     CAST(CASE
         WHEN total_time_seconds IS NULL THEN 0.0
         ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
     END AS INTEGER) AS seconds_per_page,

     -- Arbitrarily >97% is Complete
     CAST(CASE
         WHEN percentage > 97.0 THEN 100.0
         WHEN percentage IS NULL THEN 0.0
         ELSE percentage
     END AS REAL) AS percentage

 FROM documents

@@ -244,9 +244,9 @@ WITH true_progress AS (
         start_time AS last_read,
         SUM(duration) AS total_time_seconds,
         document_id,
-        current_page,
-        total_pages,
-        ROUND(CAST(current_page AS REAL) / CAST(total_pages AS REAL) * 100, 2) AS percentage
+        page,
+        pages,
+        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
     FROM activity
     WHERE user_id = $user_id
     GROUP BY document_id

@@ -255,8 +255,8 @@ WITH true_progress AS (
 SELECT
     documents.*,

-    CAST(IFNULL(current_page, 0) AS INTEGER) AS current_page,
-    CAST(IFNULL(total_pages, 0) AS INTEGER) AS total_pages,
+    CAST(IFNULL(page, 0) AS INTEGER) AS page,
+    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
     CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
     CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,

@@ -291,23 +291,24 @@ OFFSET $offset;
 -- name: GetActivity :many
 SELECT
     document_id,
-    CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
+    CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
     title,
     author,
     duration,
-    current_page,
-    total_pages
+    page,
+    pages
 FROM activity
 LEFT JOIN documents ON documents.id = activity.document_id
 LEFT JOIN users ON users.id = activity.user_id
 WHERE
     activity.user_id = $user_id
     AND (
-        CAST($doc_filter AS BOOLEAN) = TRUE
-        AND document_id = $document_id
+        (
+            CAST($doc_filter AS BOOLEAN) = TRUE
+            AND document_id = $document_id
+        ) OR $doc_filter = FALSE
     )
-    OR $doc_filter = FALSE
-ORDER BY start_time DESC
+ORDER BY activity.start_time DESC
 LIMIT $limit
 OFFSET $offset;

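Note (reviewer sketch, not part of the diff): the re-nesting of the GetActivity WHERE clause matters because SQL evaluates AND before OR. In the old form, OR $doc_filter = FALSE sat outside the parentheses, so with the document filter off the user_id condition could be bypassed. The new form keeps activity.user_id = $user_id applied in both branches. A minimal boolean sketch of the two groupings, with hypothetical values, in Go for illustration:

package main

import "fmt"

func main() {
	userMatch := false // row belongs to another user
	docFilter := false // caller did not ask for a specific document
	docMatch := false

	oldWhere := (userMatch && docFilter && docMatch) || !docFilter   // true: other user's row matches
	newWhere := userMatch && ((docFilter && docMatch) || !docFilter) // false: row excluded
	fmt.Println(oldWhere, newWhere)
}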
@@ -324,117 +325,49 @@ GROUP BY activity.device_id;

 -- name: GetDocumentReadStats :one
 SELECT
-    count(DISTINCT page) AS pages_read,
-    sum(duration) AS total_time
-FROM rescaled_activity
+    COUNT(DISTINCT page) AS pages_read,
+    SUM(duration) AS total_time
+FROM activity
 WHERE document_id = $document_id
 AND user_id = $user_id
 AND start_time >= $start_time;

 -- name: GetDocumentReadStatsCapped :one
 WITH capped_stats AS (
-    SELECT min(sum(duration), CAST($page_duration_cap AS INTEGER)) AS durations
-    FROM rescaled_activity
+    SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations
+    FROM activity
     WHERE document_id = $document_id
     AND user_id = $user_id
     AND start_time >= $start_time
     GROUP BY page
 )
 SELECT
-    CAST(count(*) AS INTEGER) AS pages_read,
-    CAST(sum(durations) AS INTEGER) AS total_time
+    CAST(COUNT(*) AS INTEGER) AS pages_read,
+    CAST(SUM(durations) AS INTEGER) AS total_time
 FROM capped_stats;

 -- name: GetDocumentDaysRead :one
 WITH document_days AS (
     SELECT DATE(start_time, time_offset) AS dates
-    FROM rescaled_activity
-    JOIN users ON users.id = rescaled_activity.user_id
+    FROM activity
+    JOIN users ON users.id = activity.user_id
     WHERE document_id = $document_id
     AND user_id = $user_id
     GROUP BY dates
 )
-SELECT CAST(count(*) AS INTEGER) AS days_read
+SELECT CAST(COUNT(*) AS INTEGER) AS days_read
 FROM document_days;

--- name: GetUserWindowStreaks :one
-WITH document_windows AS (
-    SELECT
-        CASE
-            WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
-            WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
-        END AS read_window,
-        time_offset
-    FROM activity
-    JOIN users ON users.id = activity.user_id
-    WHERE user_id = $user_id
-    AND CAST($window AS TEXT) = CAST($window AS TEXT)
-    GROUP BY read_window
-),
-partitions AS (
-    SELECT
-        document_windows.*,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_window DESC
-        ) AS seqnum
-    FROM document_windows
-),
-streaks AS (
-    SELECT
-        count(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        time_offset
-    FROM partitions
-    GROUP BY
-        CASE
-            WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
-            WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
-        END,
-        time_offset
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-    LIMIT 1
-),
-current_streak AS (
-    SELECT
-        streak AS current_streak,
-        start_date AS current_streak_start_date,
-        end_date AS current_streak_end_date
-    FROM streaks
-    WHERE CASE
-        WHEN ?2 = "WEEK" THEN
-            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
-            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
-        WHEN ?2 = "DAY" THEN
-            DATE('now', time_offset, '-1 day') = current_streak_end_date
-            OR DATE('now', time_offset) = current_streak_end_date
-    END
-    LIMIT 1
-)
-SELECT
-    CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
-    CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
-    CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
-    IFNULL(current_streak, 0) AS current_streak,
-    CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
-    CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
-FROM max_streak
-LEFT JOIN current_streak ON 1 = 1
-LIMIT 1;
+-- name: GetUserStreaks :many
+SELECT * FROM user_streaks
+WHERE user_id = $user_id;

 -- name: GetDatabaseInfo :one
 SELECT
-    (SELECT count(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size,
-    (SELECT count(rowid) FROM documents) AS documents_size,
-    (SELECT count(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size,
-    (SELECT count(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size
+    (SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size,
+    (SELECT COUNT(rowid) FROM documents) AS documents_size,
+    (SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size,
+    (SELECT COUNT(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size
 LIMIT 1;

 -- name: GetDailyReadStats :many

@@ -448,7 +381,7 @@ WITH RECURSIVE last_30_days AS (
 ),
 activity_records AS (
     SELECT
-        sum(duration) AS seconds_read,
+        SUM(duration) AS seconds_read,
         DATE(start_time, time_offset) AS day
     FROM activity
     LEFT JOIN users ON users.id = activity.user_id
@@ -13,49 +13,49 @@ import (
 )

 const addActivity = `-- name: AddActivity :one
-INSERT INTO activity (
+INSERT INTO raw_activity (
     user_id,
     document_id,
     device_id,
     start_time,
     duration,
-    current_page,
-    total_pages
+    page,
+    pages
 )
 VALUES (?, ?, ?, ?, ?, ?, ?)
-RETURNING id, user_id, document_id, device_id, start_time, duration, current_page, total_pages, created_at
+RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at
 `

 type AddActivityParams struct {
 	UserID     string    `json:"user_id"`
 	DocumentID string    `json:"document_id"`
 	DeviceID   string    `json:"device_id"`
 	StartTime  time.Time `json:"start_time"`
 	Duration   int64     `json:"duration"`
-	CurrentPage int64    `json:"current_page"`
-	TotalPages  int64    `json:"total_pages"`
+	Page       int64     `json:"page"`
+	Pages      int64     `json:"pages"`
 }

-func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) {
+func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) {
 	row := q.db.QueryRowContext(ctx, addActivity,
 		arg.UserID,
 		arg.DocumentID,
 		arg.DeviceID,
 		arg.StartTime,
 		arg.Duration,
-		arg.CurrentPage,
-		arg.TotalPages,
+		arg.Page,
+		arg.Pages,
 	)
-	var i Activity
+	var i RawActivity
 	err := row.Scan(
 		&i.ID,
 		&i.UserID,
 		&i.DocumentID,
 		&i.DeviceID,
 		&i.StartTime,
+		&i.Page,
+		&i.Pages,
 		&i.Duration,
-		&i.CurrentPage,
-		&i.TotalPages,
 		&i.CreatedAt,
 	)
 	return i, err

@@ -151,23 +151,24 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error)
 const getActivity = `-- name: GetActivity :many
 SELECT
     document_id,
-    CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
+    CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
     title,
     author,
     duration,
-    current_page,
-    total_pages
+    page,
+    pages
 FROM activity
 LEFT JOIN documents ON documents.id = activity.document_id
 LEFT JOIN users ON users.id = activity.user_id
 WHERE
     activity.user_id = ?1
     AND (
-        CAST(?2 AS BOOLEAN) = TRUE
-        AND document_id = ?3
+        (
+            CAST(?2 AS BOOLEAN) = TRUE
+            AND document_id = ?3
+        ) OR ?2 = FALSE
     )
-    OR ?2 = FALSE
-ORDER BY start_time DESC
+ORDER BY activity.start_time DESC
 LIMIT ?5
 OFFSET ?4
 `

@@ -181,13 +182,13 @@ type GetActivityParams struct {
 }

 type GetActivityRow struct {
 	DocumentID string  `json:"document_id"`
 	StartTime  string  `json:"start_time"`
 	Title      *string `json:"title"`
 	Author     *string `json:"author"`
 	Duration   int64   `json:"duration"`
-	CurrentPage int64  `json:"current_page"`
-	TotalPages  int64  `json:"total_pages"`
+	Page       int64   `json:"page"`
+	Pages      int64   `json:"pages"`
 }

 func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]GetActivityRow, error) {

@@ -211,8 +212,8 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Get
 			&i.Title,
 			&i.Author,
 			&i.Duration,
-			&i.CurrentPage,
-			&i.TotalPages,
+			&i.Page,
+			&i.Pages,
 		); err != nil {
 			return nil, err
 		}

@@ -238,7 +239,7 @@ WITH RECURSIVE last_30_days AS (
 ),
 activity_records AS (
     SELECT
-        sum(duration) AS seconds_read,
+        SUM(duration) AS seconds_read,
         DATE(start_time, time_offset) AS day
     FROM activity
     LEFT JOIN users ON users.id = activity.user_id

@@ -290,10 +291,10 @@ func (q *Queries) GetDailyReadStats(ctx context.Context, userID string) ([]GetDa

 const getDatabaseInfo = `-- name: GetDatabaseInfo :one
 SELECT
-    (SELECT count(rowid) FROM activity WHERE activity.user_id = ?1) AS activity_size,
-    (SELECT count(rowid) FROM documents) AS documents_size,
-    (SELECT count(rowid) FROM document_progress WHERE document_progress.user_id = ?1) AS progress_size,
-    (SELECT count(rowid) FROM devices WHERE devices.user_id = ?1) AS devices_size
+    (SELECT COUNT(rowid) FROM activity WHERE activity.user_id = ?1) AS activity_size,
+    (SELECT COUNT(rowid) FROM documents) AS documents_size,
+    (SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = ?1) AS progress_size,
+    (SELECT COUNT(rowid) FROM devices WHERE devices.user_id = ?1) AS devices_size
 LIMIT 1
 `

@@ -451,13 +452,13 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
 const getDocumentDaysRead = `-- name: GetDocumentDaysRead :one
 WITH document_days AS (
     SELECT DATE(start_time, time_offset) AS dates
-    FROM rescaled_activity
-    JOIN users ON users.id = rescaled_activity.user_id
+    FROM activity
+    JOIN users ON users.id = activity.user_id
     WHERE document_id = ?1
     AND user_id = ?2
     GROUP BY dates
 )
-SELECT CAST(count(*) AS INTEGER) AS days_read
+SELECT CAST(COUNT(*) AS INTEGER) AS days_read
 FROM document_days
 `

@@ -475,9 +476,9 @@ func (q *Queries) GetDocumentDaysRead(ctx context.Context, arg GetDocumentDaysRe

 const getDocumentReadStats = `-- name: GetDocumentReadStats :one
 SELECT
-    count(DISTINCT page) AS pages_read,
-    sum(duration) AS total_time
-FROM rescaled_activity
+    COUNT(DISTINCT page) AS pages_read,
+    SUM(duration) AS total_time
+FROM activity
 WHERE document_id = ?1
 AND user_id = ?2
 AND start_time >= ?3

@@ -503,16 +504,16 @@ func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadS

 const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one
 WITH capped_stats AS (
-    SELECT min(sum(duration), CAST(?1 AS INTEGER)) AS durations
-    FROM rescaled_activity
+    SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations
+    FROM activity
     WHERE document_id = ?2
     AND user_id = ?3
     AND start_time >= ?4
     GROUP BY page
 )
 SELECT
-    CAST(count(*) AS INTEGER) AS pages_read,
-    CAST(sum(durations) AS INTEGER) AS total_time
+    CAST(COUNT(*) AS INTEGER) AS pages_read,
+    CAST(SUM(durations) AS INTEGER) AS total_time
 FROM capped_stats
 `

@@ -546,14 +547,14 @@ WITH true_progress AS (
         start_time AS last_read,
         SUM(duration) AS total_time_seconds,
         document_id,
-        current_page,
-        total_pages,
+        page,
+        pages,

         -- Determine Read Pages
-        COUNT(DISTINCT current_page) AS read_pages,
+        COUNT(DISTINCT page) AS read_pages,

         -- Derive Percentage of Book
-        ROUND(CAST(current_page AS REAL) / CAST(total_pages AS REAL) * 100, 2) AS percentage
+        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
     FROM activity
     WHERE user_id = ?1
     AND document_id = ?2

@@ -564,8 +565,8 @@ WITH true_progress AS (
 SELECT
     documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at,

-    CAST(IFNULL(current_page, 0) AS INTEGER) AS current_page,
-    CAST(IFNULL(total_pages, 0) AS INTEGER) AS total_pages,
+    CAST(IFNULL(page, 0) AS INTEGER) AS page,
+    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
     CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
     CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
     CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages,

@@ -574,15 +575,15 @@ SELECT
     -- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
     -- 2. Divide by Read Pages (Distinct Pages in Activity)
     CAST(CASE
         WHEN total_time_seconds IS NULL THEN 0.0
         ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
     END AS INTEGER) AS seconds_per_page,

     -- Arbitrarily >97% is Complete
     CAST(CASE
         WHEN percentage > 97.0 THEN 100.0
         WHEN percentage IS NULL THEN 0.0
         ELSE percentage
     END AS REAL) AS percentage

 FROM documents

@@ -618,8 +619,8 @@ type GetDocumentWithStatsRow struct {
 	Deleted          bool      `json:"-"`
 	UpdatedAt        time.Time `json:"updated_at"`
 	CreatedAt        time.Time `json:"created_at"`
-	CurrentPage      int64     `json:"current_page"`
-	TotalPages       int64     `json:"total_pages"`
+	Page             int64     `json:"page"`
+	Pages            int64     `json:"pages"`
 	TotalTimeSeconds int64     `json:"total_time_seconds"`
 	LastRead         string    `json:"last_read"`
 	ReadPages        int64     `json:"read_pages"`

@@ -650,8 +651,8 @@ func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithS
 		&i.Deleted,
 		&i.UpdatedAt,
 		&i.CreatedAt,
-		&i.CurrentPage,
-		&i.TotalPages,
+		&i.Page,
+		&i.Pages,
 		&i.TotalTimeSeconds,
 		&i.LastRead,
 		&i.ReadPages,

@@ -722,9 +723,9 @@ WITH true_progress AS (
         start_time AS last_read,
         SUM(duration) AS total_time_seconds,
         document_id,
-        current_page,
-        total_pages,
-        ROUND(CAST(current_page AS REAL) / CAST(total_pages AS REAL) * 100, 2) AS percentage
+        page,
+        pages,
+        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
     FROM activity
     WHERE user_id = ?1
     GROUP BY document_id

@@ -733,8 +734,8 @@ WITH true_progress AS (
 SELECT
     documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at,

-    CAST(IFNULL(current_page, 0) AS INTEGER) AS current_page,
-    CAST(IFNULL(total_pages, 0) AS INTEGER) AS total_pages,
+    CAST(IFNULL(page, 0) AS INTEGER) AS page,
+    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
     CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
     CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,

@@ -779,8 +780,8 @@ type GetDocumentsWithStatsRow struct {
 	Deleted          bool      `json:"-"`
 	UpdatedAt        time.Time `json:"updated_at"`
 	CreatedAt        time.Time `json:"created_at"`
-	CurrentPage      int64     `json:"current_page"`
-	TotalPages       int64     `json:"total_pages"`
+	Page             int64     `json:"page"`
+	Pages            int64     `json:"pages"`
 	TotalTimeSeconds int64     `json:"total_time_seconds"`
 	LastRead         string    `json:"last_read"`
 	Percentage       float64   `json:"percentage"`

@@ -815,8 +816,8 @@ func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWit
 		&i.Deleted,
 		&i.UpdatedAt,
 		&i.CreatedAt,
-		&i.CurrentPage,
-		&i.TotalPages,
+		&i.Page,
+		&i.Pages,
 		&i.TotalTimeSeconds,
 		&i.LastRead,
 		&i.Percentage,

@@ -978,105 +979,41 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
 	return i, err
 }

-const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one
-WITH document_windows AS (
-    SELECT
-        CASE
-            WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
-            WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
-        END AS read_window,
-        time_offset
-    FROM activity
-    JOIN users ON users.id = activity.user_id
-    WHERE user_id = ?1
-    AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT)
-    GROUP BY read_window
-),
-partitions AS (
-    SELECT
-        document_windows.read_window, document_windows.time_offset,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_window DESC
-        ) AS seqnum
-    FROM document_windows
-),
-streaks AS (
-    SELECT
-        count(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        time_offset
-    FROM partitions
-    GROUP BY
-        CASE
-            WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
-            WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
-        END,
-        time_offset
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-    LIMIT 1
-),
-current_streak AS (
-    SELECT
-        streak AS current_streak,
-        start_date AS current_streak_start_date,
-        end_date AS current_streak_end_date
-    FROM streaks
-    WHERE CASE
-        WHEN ?2 = "WEEK" THEN
-            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
-            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
-        WHEN ?2 = "DAY" THEN
-            DATE('now', time_offset, '-1 day') = current_streak_end_date
-            OR DATE('now', time_offset) = current_streak_end_date
-    END
-    LIMIT 1
-)
-SELECT
-    CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
-    CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
-    CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
-    IFNULL(current_streak, 0) AS current_streak,
-    CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
-    CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
-FROM max_streak
-LEFT JOIN current_streak ON 1 = 1
-LIMIT 1
+const getUserStreaks = `-- name: GetUserStreaks :many
+SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
+WHERE user_id = ?1
 `

-type GetUserWindowStreaksParams struct {
-	UserID string `json:"user_id"`
-	Window string `json:"window"`
-}
-
-type GetUserWindowStreaksRow struct {
-	MaxStreak              int64       `json:"max_streak"`
-	MaxStreakStartDate     string      `json:"max_streak_start_date"`
-	MaxStreakEndDate       string      `json:"max_streak_end_date"`
-	CurrentStreak          interface{} `json:"current_streak"`
-	CurrentStreakStartDate string      `json:"current_streak_start_date"`
-	CurrentStreakEndDate   string      `json:"current_streak_end_date"`
-}
-
-func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) {
-	row := q.db.QueryRowContext(ctx, getUserWindowStreaks, arg.UserID, arg.Window)
-	var i GetUserWindowStreaksRow
-	err := row.Scan(
-		&i.MaxStreak,
-		&i.MaxStreakStartDate,
-		&i.MaxStreakEndDate,
-		&i.CurrentStreak,
-		&i.CurrentStreakStartDate,
-		&i.CurrentStreakEndDate,
-	)
-	return i, err
+func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) {
+	rows, err := q.db.QueryContext(ctx, getUserStreaks, userID)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	var items []UserStreak
+	for rows.Next() {
+		var i UserStreak
+		if err := rows.Scan(
+			&i.UserID,
+			&i.Window,
+			&i.MaxStreak,
+			&i.MaxStreakStartDate,
+			&i.MaxStreakEndDate,
+			&i.CurrentStreak,
+			&i.CurrentStreakStartDate,
+			&i.CurrentStreakEndDate,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Close(); err != nil {
+		return nil, err
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
 }

 const getUsers = `-- name: GetUsers :many
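Note (reviewer sketch, not part of the diff): GetUserStreaks now returns one UserStreak row per window ("DAY" and "WEEK") straight from the cached user_streaks table instead of running the old windowed CTE twice. A hypothetical caller that splits the result by window might look like this (helper name and package qualifier are assumptions):

// splitStreaks picks out the daily and weekly rows from a GetUserStreaks result.
func splitStreaks(streaks []database.UserStreak) (daily, weekly *database.UserStreak) {
	for i := range streaks {
		switch streaks[i].Window {
		case "DAY":
			daily = &streaks[i]
		case "WEEK":
			weekly = &streaks[i]
		}
	}
	return daily, weekly
}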
@ -1,6 +1,10 @@
|
|||||||
PRAGMA foreign_keys = ON;
|
PRAGMA foreign_keys = ON;
|
||||||
PRAGMA journal_mode = WAL;
|
PRAGMA journal_mode = WAL;
|
||||||
|
|
||||||
|
---------------------------------------------------------------
|
||||||
|
------------------------ Normal Tables ------------------------
|
||||||
|
---------------------------------------------------------------
|
||||||
|
|
||||||
-- Authentication
|
-- Authentication
|
||||||
CREATE TABLE IF NOT EXISTS users (
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
id TEXT NOT NULL PRIMARY KEY,
|
id TEXT NOT NULL PRIMARY KEY,
|
||||||
@ -101,17 +105,17 @@ CREATE TABLE IF NOT EXISTS document_progress (
|
|||||||
PRIMARY KEY (user_id, document_id, device_id)
|
PRIMARY KEY (user_id, document_id, device_id)
|
||||||
);
|
);
|
||||||
|
|
||||||
-- Read Activity
|
-- Raw Read Activity
|
||||||
CREATE TABLE IF NOT EXISTS activity (
|
CREATE TABLE IF NOT EXISTS raw_activity (
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
user_id TEXT NOT NULL,
|
user_id TEXT NOT NULL,
|
||||||
document_id TEXT NOT NULL,
|
document_id TEXT NOT NULL,
|
||||||
device_id TEXT NOT NULL,
|
device_id TEXT NOT NULL,
|
||||||
|
|
||||||
start_time DATETIME NOT NULL,
|
start_time DATETIME NOT NULL,
|
||||||
|
page INTEGER NOT NULL,
|
||||||
|
pages INTEGER NOT NULL,
|
||||||
duration INTEGER NOT NULL,
|
duration INTEGER NOT NULL,
|
||||||
current_page INTEGER NOT NULL,
|
|
||||||
total_pages INTEGER NOT NULL,
|
|
||||||
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
|
||||||
FOREIGN KEY (user_id) REFERENCES users (id),
|
FOREIGN KEY (user_id) REFERENCES users (id),
|
||||||
@ -119,6 +123,275 @@ CREATE TABLE IF NOT EXISTS activity (
|
|||||||
FOREIGN KEY (device_id) REFERENCES devices (id)
|
FOREIGN KEY (device_id) REFERENCES devices (id)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------
|
||||||
|
----------------------- Temporary Tables ----------------------
|
||||||
|
---------------------------------------------------------------
|
||||||
|
|
||||||
|
-- Temporary Activity Table (Cached from View)
|
||||||
|
CREATE TEMPORARY TABLE IF NOT EXISTS activity (
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
document_id TEXT NOT NULL,
|
||||||
|
device_id TEXT NOT NULL,
|
||||||
|
|
||||||
|
created_at DATETIME NOT NULL,
|
||||||
|
start_time DATETIME NOT NULL,
|
||||||
|
page INTEGER NOT NULL,
|
||||||
|
pages INTEGER NOT NULL,
|
||||||
|
duration INTEGER NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Temporary User Streaks Table (Cached from View)
|
||||||
|
CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
window TEXT NOT NULL,
|
||||||
|
|
||||||
|
max_streak INTEGER NOT NULL,
|
||||||
|
max_streak_start_date TEXT NOT NULL,
|
||||||
|
max_streak_end_date TEXT NOT NULL,
|
||||||
|
|
||||||
|
current_streak INTEGER NOT NULL,
|
||||||
|
current_streak_start_date TEXT NOT NULL,
|
||||||
|
current_streak_end_date TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------
|
||||||
|
--------------------------- Indexes ---------------------------
|
||||||
|
---------------------------------------------------------------
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS temp.activity_start_time ON activity (start_time);
|
||||||
|
CREATE INDEX IF NOT EXISTS temp.activity_user_id ON activity (user_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS temp.activity_user_id_document_id ON activity (
|
||||||
|
user_id,
|
||||||
|
document_id
|
||||||
|
);
|
||||||
|
|
||||||
|
---------------------------------------------------------------
|
||||||
|
---------------------------- Views ----------------------------
|
||||||
|
---------------------------------------------------------------
|
||||||
|
|
||||||
|
--------------------------------
|
||||||
|
------- Rescaled Activity ------
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
CREATE VIEW IF NOT EXISTS view_rescaled_activity AS
|
||||||
|
|
||||||
|
WITH RECURSIVE nums (idx) AS (
|
||||||
|
SELECT 1 AS idx
|
||||||
|
UNION ALL
|
||||||
|
SELECT idx + 1
|
||||||
|
FROM nums
|
||||||
|
LIMIT 1000
|
||||||
|
),
|
||||||
|
|
||||||
|
current_pages AS (
|
||||||
|
SELECT
|
||||||
|
document_id,
|
||||||
|
user_id,
|
||||||
|
pages
|
||||||
|
FROM raw_activity
|
||||||
|
GROUP BY document_id, user_id
|
||||||
|
HAVING MAX(start_time)
|
||||||
|
ORDER BY start_time DESC
|
||||||
|
),
|
||||||
|
|
||||||
|
intermediate AS (
|
||||||
|
SELECT
|
||||||
|
raw_activity.document_id,
|
||||||
|
raw_activity.device_id,
|
||||||
|
raw_activity.user_id,
|
||||||
|
raw_activity.created_at,
|
||||||
|
raw_activity.start_time,
|
||||||
|
raw_activity.duration,
|
||||||
|
raw_activity.page,
|
||||||
|
current_pages.pages,
|
||||||
|
|
||||||
|
-- Derive first page
|
||||||
|
((raw_activity.page - 1) * current_pages.pages) / raw_activity.pages
|
||||||
|
+ 1 AS first_page,
|
||||||
|
|
||||||
|
-- Derive last page
|
||||||
|
MAX(
|
||||||
|
((raw_activity.page - 1) * current_pages.pages)
|
||||||
|
/ raw_activity.pages
|
||||||
|
+ 1,
|
||||||
|
(raw_activity.page * current_pages.pages) / raw_activity.pages
|
||||||
|
) AS last_page
|
||||||
|
|
||||||
|
FROM raw_activity
|
||||||
|
INNER JOIN current_pages ON
|
||||||
|
current_pages.document_id = raw_activity.document_id
|
||||||
|
AND current_pages.user_id = raw_activity.user_id
|
||||||
|
),
|
||||||
|
|
||||||
|
num_limit AS (
|
||||||
|
SELECT * FROM nums
|
||||||
|
LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate)
|
||||||
|
),
|
||||||
|
|
||||||
|
rescaled_raw AS (
|
||||||
|
SELECT
|
||||||
|
intermediate.document_id,
|
||||||
|
intermediate.device_id,
|
||||||
|
intermediate.user_id,
|
||||||
|
intermediate.created_at,
|
||||||
|
intermediate.start_time,
|
||||||
|
intermediate.last_page,
|
||||||
|
intermediate.pages,
|
||||||
|
intermediate.first_page + num_limit.idx - 1 AS page,
|
||||||
|
intermediate.duration / (
|
||||||
|
intermediate.last_page - intermediate.first_page + 1.0
|
||||||
|
) AS duration
|
||||||
|
FROM intermediate
|
||||||
|
LEFT JOIN num_limit ON
|
||||||
|
num_limit.idx <= (intermediate.last_page - intermediate.first_page + 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
document_id,
|
||||||
|
device_id,
|
||||||
|
created_at,
|
||||||
|
start_time,
|
||||||
|
page,
|
||||||
|
pages,
|
||||||
|
|
||||||
|
-- Round up if last page (maintains total duration)
|
||||||
|
CAST(CASE
|
||||||
|
WHEN page = last_page AND duration != CAST(duration AS INTEGER)
|
||||||
|
THEN duration + 1
|
||||||
|
ELSE duration
|
||||||
|
END AS INTEGER) AS duration
|
||||||
|
FROM rescaled_raw;
|
||||||
|
|
||||||
|

--------------------------------
--------- User Streaks ---------
--------------------------------

CREATE VIEW IF NOT EXISTS view_user_streaks AS

WITH document_windows AS (
    SELECT
        activity.user_id,
        users.time_offset,
        DATE(
            activity.start_time,
            users.time_offset,
            'weekday 0', '-7 day'
        ) AS weekly_read,
        DATE(activity.start_time, users.time_offset) AS daily_read
    FROM raw_activity AS activity
    LEFT JOIN users ON users.id = activity.user_id
    GROUP BY activity.user_id, weekly_read, daily_read
),

weekly_partitions AS (
    SELECT
        user_id,
        time_offset,
        'WEEK' AS "window",
        weekly_read AS read_window,
        row_number() OVER (
            PARTITION BY user_id ORDER BY weekly_read DESC
        ) AS seqnum
    FROM document_windows
    GROUP BY user_id, weekly_read
),

daily_partitions AS (
    SELECT
        user_id,
        time_offset,
        'DAY' AS "window",
        daily_read AS read_window,
        row_number() OVER (
            PARTITION BY user_id ORDER BY daily_read DESC
        ) AS seqnum
    FROM document_windows
    GROUP BY user_id, daily_read
),

streaks AS (
    SELECT
        COUNT(*) AS streak,
        MIN(read_window) AS start_date,
        MAX(read_window) AS end_date,
        window,
        user_id,
        time_offset
    FROM daily_partitions
    GROUP BY
        time_offset,
        user_id,
        DATE(read_window, '+' || seqnum || ' day')

    UNION ALL

    SELECT
        COUNT(*) AS streak,
        MIN(read_window) AS start_date,
        MAX(read_window) AS end_date,
        window,
        user_id,
        time_offset
    FROM weekly_partitions
    GROUP BY
        time_offset,
        user_id,
        DATE(read_window, '+' || (seqnum * 7) || ' day')
),
max_streak AS (
    SELECT
        MAX(streak) AS max_streak,
        start_date AS max_streak_start_date,
        end_date AS max_streak_end_date,
        window,
        user_id
    FROM streaks
    GROUP BY user_id, window
),
current_streak AS (
    SELECT
        streak AS current_streak,
        start_date AS current_streak_start_date,
        end_date AS current_streak_end_date,
        window,
        user_id
    FROM streaks
    WHERE CASE
        WHEN window = "WEEK" THEN
            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
        WHEN window = "DAY" THEN
            DATE('now', time_offset, '-1 day') = current_streak_end_date
            OR DATE('now', time_offset) = current_streak_end_date
    END
    GROUP BY user_id, window
)
SELECT
    max_streak.user_id,
    max_streak.window,
    IFNULL(max_streak, 0) AS max_streak,
    IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
    IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
    IFNULL(current_streak, 0) AS current_streak,
    IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
    IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON
    current_streak.user_id = max_streak.user_id
    AND current_streak.window = max_streak.window;
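
Purely as an illustration of the shape this view produces (one row per user per window), a consumer can read both streak windows in a single query; the literal user ID below is a placeholder:

-- Illustrative only; 'some-user-id' is a placeholder value
SELECT
    "window",
    current_streak,
    current_streak_start_date,
    current_streak_end_date,
    max_streak,
    max_streak_start_date,
    max_streak_end_date
FROM view_user_streaks
WHERE user_id = 'some-user-id';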

---------------------------------------------------------------
------------------ Populate Temporary Tables ------------------
---------------------------------------------------------------
INSERT INTO activity SELECT * FROM view_rescaled_activity;
INSERT INTO user_streaks SELECT * FROM view_user_streaks;

---------------------------------------------------------------
--------------------------- Triggers --------------------------
---------------------------------------------------------------

-- Update Trigger
CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
BEFORE UPDATE ON documents BEGIN
@ -126,59 +399,3 @@ UPDATE documents
SET updated_at = CURRENT_TIMESTAMP
WHERE id = old.id;
END;

-- Rescaled Activity View (Adapted from KOReader)
CREATE VIEW IF NOT EXISTS rescaled_activity AS

WITH RECURSIVE numbers (idx) AS (
    SELECT 1 AS idx
    UNION ALL
    SELECT idx + 1
    FROM numbers
    LIMIT 1000
),

total_pages AS (
    SELECT
        document_id,
        total_pages AS pages
    FROM activity
    GROUP BY document_id
    HAVING MAX(start_time)
    ORDER BY start_time DESC
),

intermediate AS (
    SELECT
        activity.document_id,
        activity.device_id,
        activity.user_id,
        activity.current_page,
        activity.total_pages,
        total_pages.pages,
        activity.start_time,
        activity.duration,
        numbers.idx,

        -- Derive First Page
        ((activity.current_page - 1) * total_pages.pages) / activity.total_pages
            + 1 AS first_page,

        -- Derive Last Page
        MAX(
            ((activity.current_page - 1) * total_pages.pages)
                / activity.total_pages
                + 1,
            (activity.current_page * total_pages.pages) / activity.total_pages
        ) AS last_page
    FROM activity
    INNER JOIN total_pages ON total_pages.document_id = activity.document_id
    INNER JOIN numbers ON numbers.idx <= (last_page - first_page + 1)
)

SELECT
    document_id,
    device_id,
    user_id,
    start_time,
    first_page + idx - 1 AS page,
    duration / (last_page - first_page + 1) AS duration
FROM intermediate;

database/update_temp_tables.sql (new file, +4 lines)
@ -0,0 +1,4 @@
DELETE FROM activity;
INSERT INTO activity SELECT * FROM view_rescaled_activity;
DELETE FROM user_streaks;
INSERT INTO user_streaks SELECT * FROM view_user_streaks;
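
One way the application could apply this refresh script is sketched below. This is not code from this changeset; the function name, the embed of the file, and the use of a single multi-statement Exec (which the common SQLite drivers accept) are all assumptions.

package database

import (
	"context"
	"database/sql"
	_ "embed"
)

// Assumed embed of the new script; path is relative to this package.
//go:embed update_temp_tables.sql
var updateTempTablesSQL string

// RefreshTempTables is a hypothetical helper that rebuilds the cached
// activity and user_streaks tables from their backing views.
func RefreshTempTables(ctx context.Context, db *sql.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback() // no-op once Commit succeeds

	// DELETE + INSERT both tables inside one transaction so readers
	// never observe them empty.
	if _, err := tx.ExecContext(ctx, updateTempTablesSQL); err != nil {
		return err
	}
	return tx.Commit()
}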
@ -45,7 +45,7 @@
<p>{{ $activity.Duration }}</p>
</td>
<td class="p-3 border-b border-gray-200">
<p>{{ $activity.CurrentPage }} / {{ $activity.TotalPages }}</p>
<p>{{ $activity.Page }} / {{ $activity.Pages }}</p>
</td>
</tr>
{{end}}
@ -137,7 +137,7 @@
<div>
<p class="text-gray-400">Progress</p>
<p class="font-medium text-lg">
{{ .Data.CurrentPage }} / {{ .Data.TotalPages }} ({{ .Data.Percentage }}%)
{{ .Data.Page }} / {{ .Data.Pages }} ({{ .Data.Percentage }}%)
</p>
</div>
<div>
@ -344,7 +344,7 @@
<div>
<p class="text-gray-500">Progress</p>
<p class="font-medium text-lg">
{{ .Data.CurrentPage }} / {{ .Data.TotalPages }} ({{ .Data.Percentage }}%)
{{ .Data.Page }} / {{ .Data.Pages }} ({{ .Data.Percentage }}%)
</p>
</div>
<!--
@ -10,8 +10,8 @@
<div class="grid grid-cols-1 gap-4 mb-4 md:grid-cols-2 lg:grid-cols-3">
{{range $doc := .Data }}
<div class="w-full relative">
<div class="flex gap-4 w-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded">
<div class="flex gap-4 w-full h-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded">
<div class="min-w-fit h-48 relative">
<div class="min-w-fit my-auto h-48 relative">
<a href="./documents/{{$doc.ID}}">
<img class="rounded object-cover h-full" src="./documents/{{$doc.ID}}/cover"></img>
</a>
@ -37,7 +37,7 @@
<div>
<p class="text-gray-400">Progress</p>
<p class="font-medium">
{{ $doc.CurrentPage }} / {{ $doc.TotalPages }} ({{ $doc.Percentage }}%)
{{ $doc.Page }} / {{ $doc.Pages }} ({{ $doc.Percentage }}%)
</p>
</div>
</div>
@ -100,7 +100,7 @@
</div>

<div class="grid grid-cols-2 gap-4 my-4 md:grid-cols-4">
<div class="w-full">
<a href="./documents" class="w-full">
<div
class="flex gap-4 w-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded"
>
@ -111,9 +111,8 @@
<p class="text-sm text-gray-400">Documents</p>
</div>
</div>
</div>
</a>
<a href="./activity" class="w-full">
<div class="w-full">
<div
class="flex gap-4 w-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded"
>
@ -124,8 +123,7 @@
<p class="text-sm text-gray-400">Activity Records</p>
</div>
</div>
</div>
</a>

<div class="w-full">
<div
class="flex gap-4 w-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded"
@ -138,7 +136,6 @@
</div>
</div>
</div>

<div class="w-full">
<div
class="flex gap-4 w-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded"
@ -154,6 +151,7 @@
</div>

<div class="grid grid-cols-1 gap-4 my-4 md:grid-cols-2 lg:grid-cols-3">
{{ range $item := .Data.Streaks }}
<div class="w-full">
<div
class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
@ -161,11 +159,12 @@
<p
class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
>
Daily Read Streak
{{ if eq $item.Window "WEEK" }} Weekly Read Streak {{ else }} Daily Read
Streak {{ end }}
</p>
<div class="flex items-end my-6 space-x-2">
<p class="text-5xl font-bold text-black dark:text-white">
{{ .Data.DailyStreak.CurrentStreak }}
{{ $item.CurrentStreak }}
</p>
</div>
<div class="dark:text-white">
@ -173,76 +172,33 @@
class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
>
<div>
<p>Current Daily Streak</p>
<p>
{{ if eq $item.Window "WEEK" }} Current Weekly Streak {{ else }}
Current Daily Streak {{ end }}
</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.DailyStreak.CurrentStreakStartDate }} ➞ {{
{{ $item.CurrentStreakStartDate }} ➞ {{ $item.CurrentStreakEndDate
.Data.DailyStreak.CurrentStreakEndDate }}
}}
</div>
</div>
<div class="flex items-end font-bold">
<div class="flex items-end font-bold">{{ $item.CurrentStreak }}</div>
{{ .Data.DailyStreak.CurrentStreak }}
</div>
</div>
<div class="flex items-center justify-between pb-2 mb-2 text-sm">
<div>
<p>Best Daily Streak</p>
<p>
{{ if eq $item.Window "WEEK" }} Best Weekly Streak {{ else }} Best
Daily Streak {{ end }}
</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.DailyStreak.MaxStreakStartDate }} ➞ {{
{{ $item.MaxStreakStartDate }} ➞ {{ $item.MaxStreakEndDate }}
.Data.DailyStreak.MaxStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
<div class="flex items-end font-bold">{{ $item.MaxStreak }}</div>
{{ .Data.DailyStreak.MaxStreak }}
</div>
</div>
</div>
</div>
</div>

<div class="w-full">
<div
class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
>
<p
class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
>
Weekly Read Streak
</p>
<div class="flex items-end my-6 space-x-2">
<p class="text-5xl font-bold text-black dark:text-white">
{{ .Data.WeeklyStreak.CurrentStreak }}
</p>
</div>
<div class="dark:text-white">
<div
class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
>
<div>
<p>Current Weekly Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.WeeklyStreak.CurrentStreakStartDate }} ➞ {{
.Data.WeeklyStreak.CurrentStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
{{ .Data.WeeklyStreak.CurrentStreak }}
</div>
</div>
<div class="flex items-center justify-between pb-2 mb-2 text-sm">
<div>
<p>Best Weekly Streak</p>
<div class="flex items-end text-sm text-gray-400">
{{ .Data.WeeklyStreak.MaxStreakStartDate }} ➞ {{
.Data.WeeklyStreak.MaxStreakEndDate }}
</div>
</div>
<div class="flex items-end font-bold">
{{ .Data.WeeklyStreak.MaxStreak }}
</div>
</div>
</div>
</div>
</div>
</div>
{{ end }}
</div>

{{end}}
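
For context, each $item the template now ranges over corresponds to one row of user_streaks / view_user_streaks. A rough Go-side sketch of that shape follows; the struct and field names are illustrative, not taken from the generated query code.

// Hypothetical illustration of the per-window streak row consumed above.
type UserStreak struct {
	UserID                 string
	Window                 string // "DAY" or "WEEK"
	MaxStreak              int64
	MaxStreakStartDate     string
	MaxStreakEndDate       string
	CurrentStreak          int64
	CurrentStreakStartDate string
	CurrentStreakEndDate   string
}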
@ -56,6 +56,10 @@ func GetUTCOffsets() []UTCOffset {
}

func NiceSeconds(input int64) (result string) {
if input == 0 {
return "N/A"
}

days := math.Floor(float64(input) / 60 / 60 / 24)
seconds := input % (60 * 60 * 24)
hours := math.Floor(float64(seconds) / 60 / 60)
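
A minimal test for the new guard clause might look like the following; the package name is assumed, and only the zero case is asserted since the non-zero formatting is not shown in this hunk.

package utils // assumed package name for the file containing NiceSeconds

import "testing"

func TestNiceSecondsZero(t *testing.T) {
	// The new early return maps an empty reading duration to "N/A".
	if got := NiceSeconds(0); got != "N/A" {
		t.Fatalf(`NiceSeconds(0) = %q, want "N/A"`, got)
	}
}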