Compare commits


No commits in common. "2d206826d63abddd9c787a2474206fea41820b17" and "8e81acd38178b01c9db64037b47526812a3042eb" have entirely different histories.

15 changed files with 435 additions and 608 deletions

View File

@@ -157,7 +157,6 @@ func (api *API) registerWebAppRoutes(router *gin.Engine) {
    router.GET("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminImport)
    router.POST("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminImport)
    router.GET("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminUsers)
-   router.POST("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appUpdateAdminUsers)
    router.GET("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdmin)
    router.POST("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminAction)
    router.POST("/login", api.appAuthLogin)
@@ -229,7 +228,7 @@ func (api *API) generateTemplates() *multitemplate.Renderer {
        "dict": dict,
        "fields": fields,
        "getSVGGraphData": getSVGGraphData,
-       "getTimeZones": getTimeZones,
+       "getUTCOffsets": getUTCOffsets,
        "hasPrefix": strings.HasPrefix,
        "niceNumbers": niceNumbers,
        "niceSeconds": niceSeconds,

View File

@@ -54,20 +54,6 @@ type requestAdminImport struct {
    Type importType `form:"type"`
}
-type operationType string
-const (
-   opUpdate operationType = "UPDATE"
-   opCreate operationType = "CREATE"
-)
-type requestAdminUpdateUser struct {
-   User string `form:"user"`
-   Password string `form:"password"`
-   isAdmin bool `form:"is_admin"`
-   Operation operationType `form:"operation"`
-}
type requestAdminLogs struct {
    Filter string `form:"filter"`
}
@@ -239,45 +225,6 @@ func (api *API) appGetAdminUsers(c *gin.Context) {
    c.HTML(http.StatusOK, "page/admin-users", templateVars)
}
-func (api *API) appUpdateAdminUsers(c *gin.Context) {
-   templateVars, _ := api.getBaseTemplateVars("admin-users", c)
-   var rAdminUserUpdate requestAdminUpdateUser
-   if err := c.ShouldBind(&rAdminUserUpdate); err != nil {
-       log.Error("Invalid URI Bind")
-       appErrorPage(c, http.StatusNotFound, "Invalid user update")
-       return
-   }
-   var err error
-   switch rAdminUserUpdate.Operation {
-   case opCreate:
-       err = api.createUser(rAdminUserUpdate.User, rAdminUserUpdate.Password)
-   case opUpdate:
-       err = fmt.Errorf("unimplemented")
-   default:
-       appErrorPage(c, http.StatusNotFound, "Unknown user operation")
-       return
-   }
-   if err != nil {
-       appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Unable to create user: %v", err))
-       return
-   }
-   users, err := api.db.Queries.GetUsers(api.db.Ctx)
-   if err != nil {
-       log.Error("GetUsers DB Error: ", err)
-       appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
-       return
-   }
-   templateVars["Data"] = users
-   c.HTML(http.StatusOK, "page/admin-users", templateVars)
-}
func (api *API) appGetAdminImport(c *gin.Context) {
    templateVars, _ := api.getBaseTemplateVars("admin-import", c)

View File

@@ -69,7 +69,7 @@ type requestDocumentIdentify struct {
type requestSettingsEdit struct {
    Password *string `form:"password"`
    NewPassword *string `form:"new_password"`
-   Timezone *string `form:"timezone"`
+   TimeOffset *string `form:"time_offset"`
}
type requestDocumentAdd struct {
@@ -299,8 +299,8 @@ func (api *API) appGetSettings(c *gin.Context) {
    }
    templateVars["Data"] = gin.H{
-       "Timezone": *user.Timezone,
+       "TimeOffset": *user.TimeOffset,
        "Devices": devices,
    }
    c.HTML(http.StatusOK, "page/settings", templateVars)
@@ -869,7 +869,7 @@ func (api *API) appEditSettings(c *gin.Context) {
    }
    // Validate Something Exists
-   if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.Timezone == nil {
+   if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.TimeOffset == nil {
        log.Error("Missing Form Values")
        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
        return
@@ -901,9 +901,9 @@ func (api *API) appEditSettings(c *gin.Context) {
    }
    // Set Time Offset
-   if rUserSettings.Timezone != nil {
+   if rUserSettings.TimeOffset != nil {
        templateVars["TimeOffsetMessage"] = "Time Offset Updated"
-       newUserSettings.Timezone = rUserSettings.Timezone
+       newUserSettings.TimeOffset = rUserSettings.TimeOffset
    }
    // Update User
@@ -931,8 +931,8 @@ func (api *API) appEditSettings(c *gin.Context) {
    }
    templateVars["Data"] = gin.H{
-       "Timezone": *user.Timezone,
+       "TimeOffset": *user.TimeOffset,
        "Devices": devices,
    }
    c.HTML(http.StatusOK, "page/settings", templateVars)

View File

@@ -460,55 +460,3 @@ func (api *API) rotateAllAuthHashes() error {
    return nil
}
-func (api *API) createUser(username string, rawPassword string) error {
-   password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
-   if username == "" {
-       return fmt.Errorf("username can't be empty")
-   }
-   if rawPassword == "" {
-       return fmt.Errorf("password can't be empty")
-   }
-   hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
-   if err != nil {
-       return fmt.Errorf("unable to create hashed password")
-   }
-   // Generate auth hash
-   rawAuthHash, err := utils.GenerateToken(64)
-   if err != nil {
-       return fmt.Errorf("unable to create token for user")
-   }
-   // Get current users
-   currentUsers, err := api.db.Queries.GetUsers(api.db.Ctx)
-   if err != nil {
-       return fmt.Errorf("unable to get current users")
-   }
-   // Determine if we should be admin
-   isAdmin := false
-   if len(currentUsers) == 0 {
-       isAdmin = true
-   }
-   // Create user in DB
-   authHash := fmt.Sprintf("%x", rawAuthHash)
-   if rows, err := api.db.Queries.CreateUser(api.db.Ctx, database.CreateUserParams{
-       ID: username,
-       Pass: &hashedPassword,
-       AuthHash: &authHash,
-       Admin: isAdmin,
-   }); err != nil {
-       log.Error("CreateUser DB Error:", err)
-       return fmt.Errorf("unable to create user")
-   } else if rows == 0 {
-       log.Warn("User Already Exists:", username)
-       return fmt.Errorf("user already exists")
-   }
-   return nil
-}

View File

@@ -13,43 +13,54 @@ import (
    "reichard.io/antholume/metadata"
)
-func getTimeZones() []string {
-   return []string{
-       "Africa/Cairo",
-       "Africa/Johannesburg",
-       "Africa/Lagos",
-       "Africa/Nairobi",
-       "America/Adak",
-       "America/Anchorage",
-       "America/Buenos_Aires",
-       "America/Chicago",
-       "America/Denver",
-       "America/Los_Angeles",
-       "America/Mexico_City",
-       "America/New_York",
-       "America/Nuuk",
-       "America/Phoenix",
-       "America/Puerto_Rico",
-       "America/Sao_Paulo",
-       "America/St_Johns",
-       "America/Toronto",
-       "Asia/Dubai",
-       "Asia/Hong_Kong",
-       "Asia/Kolkata",
-       "Asia/Seoul",
-       "Asia/Shanghai",
-       "Asia/Singapore",
-       "Asia/Tokyo",
-       "Atlantic/Azores",
-       "Australia/Melbourne",
-       "Australia/Sydney",
-       "Europe/Berlin",
-       "Europe/London",
-       "Europe/Moscow",
-       "Europe/Paris",
-       "Pacific/Auckland",
-       "Pacific/Honolulu",
-   }
+type UTCOffset struct {
+   Name string
+   Value string
+}
+var UTC_OFFSETS = []UTCOffset{
+   {Value: "-12 hours", Name: "UTC−12:00"},
+   {Value: "-11 hours", Name: "UTC−11:00"},
+   {Value: "-10 hours", Name: "UTC−10:00"},
+   {Value: "-9.5 hours", Name: "UTC−09:30"},
+   {Value: "-9 hours", Name: "UTC−09:00"},
+   {Value: "-8 hours", Name: "UTC−08:00"},
+   {Value: "-7 hours", Name: "UTC−07:00"},
+   {Value: "-6 hours", Name: "UTC−06:00"},
+   {Value: "-5 hours", Name: "UTC−05:00"},
+   {Value: "-4 hours", Name: "UTC−04:00"},
+   {Value: "-3.5 hours", Name: "UTC−03:30"},
+   {Value: "-3 hours", Name: "UTC−03:00"},
+   {Value: "-2 hours", Name: "UTC−02:00"},
+   {Value: "-1 hours", Name: "UTC−01:00"},
+   {Value: "0 hours", Name: "UTC±00:00"},
+   {Value: "+1 hours", Name: "UTC+01:00"},
+   {Value: "+2 hours", Name: "UTC+02:00"},
+   {Value: "+3 hours", Name: "UTC+03:00"},
+   {Value: "+3.5 hours", Name: "UTC+03:30"},
+   {Value: "+4 hours", Name: "UTC+04:00"},
+   {Value: "+4.5 hours", Name: "UTC+04:30"},
+   {Value: "+5 hours", Name: "UTC+05:00"},
+   {Value: "+5.5 hours", Name: "UTC+05:30"},
+   {Value: "+5.75 hours", Name: "UTC+05:45"},
+   {Value: "+6 hours", Name: "UTC+06:00"},
+   {Value: "+6.5 hours", Name: "UTC+06:30"},
+   {Value: "+7 hours", Name: "UTC+07:00"},
+   {Value: "+8 hours", Name: "UTC+08:00"},
+   {Value: "+8.75 hours", Name: "UTC+08:45"},
+   {Value: "+9 hours", Name: "UTC+09:00"},
+   {Value: "+9.5 hours", Name: "UTC+09:30"},
+   {Value: "+10 hours", Name: "UTC+10:00"},
+   {Value: "+10.5 hours", Name: "UTC+10:30"},
+   {Value: "+11 hours", Name: "UTC+11:00"},
+   {Value: "+12 hours", Name: "UTC+12:00"},
+   {Value: "+12.75 hours", Name: "UTC+12:45"},
+   {Value: "+13 hours", Name: "UTC+13:00"},
+   {Value: "+14 hours", Name: "UTC+14:00"},
+}
+func getUTCOffsets() []UTCOffset {
+   return UTC_OFFSETS
}
func niceSeconds(input int64) (result string) {
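The Value strings above ("-12 hours" through "+14 hours") are written so they can be passed straight to SQLite's date and time functions as modifiers, which is what replaces the custom LOCAL_TIME() Go function in the queries further down. A minimal sketch of that usage, with example values that are not part of this changeset (SQLite accepts fractional hour modifiers):

    -- Shift a stored UTC timestamp into a user's local time using an offset string.
    SELECT STRFTIME('%Y-%m-%d %H:%M:%S', '2024-01-01T12:00:00Z', '+5.5 hours');
    -- Result: 2024-01-01 17:30:00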

View File

@@ -3,17 +3,15 @@ package database
import (
    "context"
    "database/sql"
-   "database/sql/driver"
    "embed"
    _ "embed"
-   "errors"
    "fmt"
    "path/filepath"
    "time"
    "github.com/pressly/goose/v3"
    log "github.com/sirupsen/logrus"
-   sqlite "modernc.org/sqlite"
+   _ "modernc.org/sqlite"
    "reichard.io/antholume/config"
    _ "reichard.io/antholume/database/migrations"
)
@@ -28,21 +26,9 @@ type DBManager struct {
//go:embed schema.sql
var ddl string
-//go:embed views.sql
-var views string
//go:embed migrations/*
var migrations embed.FS
-// Register scalar sqlite function on init
-func init() {
-   sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{
-       NArgs: 2,
-       Deterministic: true,
-       Scalar: localTime,
-   })
-}
// Returns an initialized manager
func NewMgr(c *config.Config) *DBManager {
    // Create Manager
@@ -104,12 +90,6 @@ func (dbm *DBManager) init() error {
        return err
    }
-   // Execute views
-   if _, err := dbm.DB.Exec(views, nil); err != nil {
-       log.Panicf("Error executing views: %v", err)
-       return err
-   }
    // Update settings
    err = dbm.updateSettings()
    if err != nil {
@@ -202,7 +182,6 @@ func (dbm *DBManager) performMigrations(isNew bool) error {
    return goose.UpContext(ctx, dbm.DB, "migrations")
}
-// Determines whether the database is empty
func isEmpty(db *sql.DB) (bool, error) {
    var tableCount int
    err := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table';").Scan(&tableCount)
@@ -211,28 +190,3 @@ func isEmpty(db *sql.DB) (bool, error) {
    }
    return tableCount == 0, nil
}
-// LOCAL_TIME custom SQL function
-func localTime(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
-   timeStr, ok := args[0].(string)
-   if !ok {
-       return nil, errors.New("both arguments to TZTime must be strings")
-   }
-   timeZoneStr, ok := args[1].(string)
-   if !ok {
-       return nil, errors.New("both arguments to TZTime must be strings")
-   }
-   timeZone, err := time.LoadLocation(timeZoneStr)
-   if err != nil {
-       return nil, errors.New("unable to parse timezone")
-   }
-   formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
-   if err != nil {
-       return nil, errors.New("unable to parse time")
-   }
-   return formattedTime.In(timeZone).Format("2006-01-02 15:04:05.000"), nil
-}

View File

@@ -1,58 +0,0 @@
package migrations
import (
"context"
"database/sql"
"github.com/pressly/goose/v3"
)
func init() {
goose.AddMigrationContext(upUserTimezone, downUserTimezone)
}
func upUserTimezone(ctx context.Context, tx *sql.Tx) error {
// Determine if we have a new DB or not
isNew := ctx.Value("isNew").(bool)
if isNew {
return nil
}
// Copy table & create column
_, err := tx.Exec(`
-- Copy Table
CREATE TABLE temp_users AS SELECT * FROM users;
ALTER TABLE temp_users DROP COLUMN time_offset;
ALTER TABLE temp_users ADD COLUMN timezone TEXT;
UPDATE temp_users SET timezone = 'Europe/London';
-- Clean Table
DELETE FROM users;
ALTER TABLE users DROP COLUMN time_offset;
ALTER TABLE users ADD COLUMN timezone TEXT NOT NULL DEFAULT 'Europe/London';
-- Copy Temp Table -> Clean Table
INSERT INTO users SELECT * FROM temp_users;
-- Drop Temp Table
DROP TABLE temp_users;
`)
if err != nil {
return err
}
return nil
}
func downUserTimezone(ctx context.Context, tx *sql.Tx) error {
// Update column name & value
_, err := tx.Exec(`
ALTER TABLE users RENAME COLUMN timezone TO time_offset;
UPDATE users SET time_offset = '0 hours';
`)
if err != nil {
return err
}
return nil
}

View File

@@ -99,12 +99,12 @@ type Setting struct {
}
type User struct {
    ID string `json:"id"`
    Pass *string `json:"-"`
    AuthHash *string `json:"auth_hash"`
    Admin bool `json:"-"`
-   Timezone *string `json:"timezone"`
+   TimeOffset *string `json:"time_offset"`
    CreatedAt string `json:"created_at"`
}
type UserStreak struct {
@@ -117,3 +117,34 @@ type UserStreak struct {
    CurrentStreakStartDate string `json:"current_streak_start_date"`
    CurrentStreakEndDate string `json:"current_streak_end_date"`
}
+type ViewDocumentUserStatistic struct {
+   DocumentID string `json:"document_id"`
+   UserID string `json:"user_id"`
+   Percentage float64 `json:"percentage"`
+   LastRead interface{} `json:"last_read"`
+   ReadPercentage *float64 `json:"read_percentage"`
+   TotalTimeSeconds *float64 `json:"total_time_seconds"`
+   TotalWordsRead interface{} `json:"total_words_read"`
+   TotalWpm int64 `json:"total_wpm"`
+   YearlyTimeSeconds *float64 `json:"yearly_time_seconds"`
+   YearlyWordsRead interface{} `json:"yearly_words_read"`
+   YearlyWpm interface{} `json:"yearly_wpm"`
+   MonthlyTimeSeconds *float64 `json:"monthly_time_seconds"`
+   MonthlyWordsRead interface{} `json:"monthly_words_read"`
+   MonthlyWpm interface{} `json:"monthly_wpm"`
+   WeeklyTimeSeconds *float64 `json:"weekly_time_seconds"`
+   WeeklyWordsRead interface{} `json:"weekly_words_read"`
+   WeeklyWpm interface{} `json:"weekly_wpm"`
+}
+type ViewUserStreak struct {
+   UserID string `json:"user_id"`
+   Window string `json:"window"`
+   MaxStreak interface{} `json:"max_streak"`
+   MaxStreakStartDate interface{} `json:"max_streak_start_date"`
+   MaxStreakEndDate interface{} `json:"max_streak_end_date"`
+   CurrentStreak interface{} `json:"current_streak"`
+   CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
+   CurrentStreakEndDate interface{} `json:"current_streak_end_date"`
+}

View File

@@ -64,7 +64,7 @@ WITH filtered_activity AS (
SELECT
    document_id,
    device_id,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(activity.start_time, users.timezone)) AS TEXT) AS start_time,
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
    title,
    author,
    duration,
@@ -77,7 +77,7 @@ LEFT JOIN users ON users.id = activity.user_id;
-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days AS (
-   SELECT DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) AS date
+   SELECT DATE('now', time_offset) AS date
    FROM users WHERE users.id = $user_id
    UNION ALL
    SELECT DATE(date, '-1 days')
@@ -96,7 +96,7 @@ filtered_activity AS (
activity_days AS (
    SELECT
        SUM(duration) AS seconds_read,
-       DATE(LOCAL_TIME(start_time, timezone)) AS day
+       DATE(start_time, time_offset) AS day
    FROM filtered_activity AS activity
    LEFT JOIN users ON users.id = activity.user_id
    GROUP BY day
@@ -135,8 +135,8 @@ WHERE id = $device_id LIMIT 1;
SELECT
    devices.id,
    devices.device_name,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.created_at, users.timezone)) AS TEXT) AS created_at,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.last_synced, users.timezone)) AS TEXT) AS last_synced
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
FROM devices
JOIN users ON users.id = devices.user_id
WHERE users.id = $user_id
@@ -174,7 +174,7 @@ SELECT
    CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
    COALESCE(dus.read_percentage, 0) AS read_percentage,
    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), LOCAL_TIME(users.timezone))
+   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
        AS last_read,
    ROUND(CAST(CASE
        WHEN dus.percentage IS NULL THEN 0.0
@@ -226,7 +226,7 @@ SELECT
    CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
    COALESCE(dus.read_percentage, 0) AS read_percentage,
    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-   STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, "1970-01-01"), users.timezone))
+   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
        AS last_read,
    ROUND(CAST(CASE
        WHEN dus.percentage IS NULL THEN 0.0
@@ -280,7 +280,7 @@ SELECT
    ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
    progress.document_id,
    progress.user_id,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, LOCAL_TIME(users.timezone)) AS TEXT) AS created_at
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, users.time_offset) AS TEXT) AS created_at
FROM document_progress AS progress
LEFT JOIN users ON progress.user_id = users.id
LEFT JOIN devices ON progress.device_id = devices.id
@@ -369,7 +369,7 @@ UPDATE users
SET
    pass = COALESCE($password, pass),
    auth_hash = COALESCE($auth_hash, auth_hash),
-   timezone = COALESCE($timezone, timezone),
+   time_offset = COALESCE($time_offset, time_offset),
    admin = COALESCE($admin, admin)
WHERE id = $user_id
RETURNING *;

View File

@@ -181,7 +181,7 @@ WITH filtered_activity AS (
SELECT
    document_id,
    device_id,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(activity.start_time, users.timezone)) AS TEXT) AS start_time,
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
    title,
    author,
    duration,
@@ -254,7 +254,7 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Get
const getDailyReadStats = `-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days AS (
-   SELECT DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) AS date
+   SELECT DATE('now', time_offset) AS date
    FROM users WHERE users.id = ?1
    UNION ALL
    SELECT DATE(date, '-1 days')
@@ -273,7 +273,7 @@ filtered_activity AS (
activity_days AS (
    SELECT
        SUM(duration) AS seconds_read,
-       DATE(LOCAL_TIME(start_time, timezone)) AS day
+       DATE(start_time, time_offset) AS day
    FROM filtered_activity AS activity
    LEFT JOIN users ON users.id = activity.user_id
    GROUP BY day
@@ -410,8 +410,8 @@ const getDevices = `-- name: GetDevices :many
SELECT
    devices.id,
    devices.device_name,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.created_at, users.timezone)) AS TEXT) AS created_at,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.last_synced, users.timezone)) AS TEXT) AS last_synced
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
FROM devices
JOIN users ON users.id = devices.user_id
WHERE users.id = ?1
@@ -544,7 +544,7 @@ SELECT
    CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
    COALESCE(dus.read_percentage, 0) AS read_percentage,
    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), LOCAL_TIME(users.timezone))
+   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
        AS last_read,
    ROUND(CAST(CASE
        WHEN dus.percentage IS NULL THEN 0.0
@@ -698,9 +698,7 @@ SELECT
    CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
    COALESCE(dus.read_percentage, 0) AS read_percentage,
    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-   -- LOCAL_TIME(STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01")), users.timezone)
-   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"))
+   STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
        AS last_read,
    ROUND(CAST(CASE
        WHEN dus.percentage IS NULL THEN 0.0
@@ -889,7 +887,7 @@ SELECT
    ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
    progress.document_id,
    progress.user_id,
-   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, LOCAL_TIME(users.timezone)) AS TEXT) AS created_at
+   CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, users.time_offset) AS TEXT) AS created_at
FROM document_progress AS progress
LEFT JOIN users ON progress.user_id = users.id
LEFT JOIN devices ON progress.device_id = devices.id
@@ -963,7 +961,7 @@ func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) ([]Get
}
const getUser = `-- name: GetUser :one
-SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
+SELECT id, pass, auth_hash, admin, time_offset, created_at FROM users
WHERE id = ?1 LIMIT 1
`
@@ -975,7 +973,7 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
    &i.Pass,
    &i.AuthHash,
    &i.Admin,
-   &i.Timezone,
+   &i.TimeOffset,
    &i.CreatedAt,
)
return i, err
@@ -1102,7 +1100,7 @@ func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStre
}
const getUsers = `-- name: GetUsers :many
-SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
+SELECT id, pass, auth_hash, admin, time_offset, created_at FROM users
`
func (q *Queries) GetUsers(ctx context.Context) ([]User, error) {
@@ -1119,7 +1117,7 @@ func (q *Queries) GetUsers(ctx context.Context) ([]User, error) {
    &i.Pass,
    &i.AuthHash,
    &i.Admin,
-   &i.Timezone,
+   &i.TimeOffset,
    &i.CreatedAt,
); err != nil {
    return nil, err
@@ -1253,25 +1251,25 @@ UPDATE users
SET
    pass = COALESCE(?1, pass),
    auth_hash = COALESCE(?2, auth_hash),
-   timezone = COALESCE(?3, timezone),
+   time_offset = COALESCE(?3, time_offset),
    admin = COALESCE(?4, admin)
WHERE id = ?5
-RETURNING id, pass, auth_hash, admin, timezone, created_at
+RETURNING id, pass, auth_hash, admin, time_offset, created_at
`
type UpdateUserParams struct {
    Password *string `json:"-"`
    AuthHash *string `json:"auth_hash"`
-   Timezone *string `json:"timezone"`
+   TimeOffset *string `json:"time_offset"`
    Admin bool `json:"-"`
    UserID string `json:"user_id"`
}
func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, error) {
    row := q.db.QueryRowContext(ctx, updateUser,
        arg.Password,
        arg.AuthHash,
-       arg.Timezone,
+       arg.TimeOffset,
        arg.Admin,
        arg.UserID,
    )
@@ -1281,7 +1279,7 @@ func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, e
    &i.Pass,
    &i.AuthHash,
    &i.Admin,
-   &i.Timezone,
+   &i.TimeOffset,
    &i.CreatedAt,
)
return i, err

View File

@@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS users (
    pass TEXT NOT NULL,
    auth_hash TEXT NOT NULL,
    admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)),
-   timezone TEXT NOT NULL DEFAULT 'Europe/London',
+   time_offset TEXT NOT NULL DEFAULT '0 hours',
    created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
);
@@ -174,9 +174,309 @@ CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
    document_id
);
---------------------------------------------------------------
---------------------------- Views ----------------------------
---------------------------------------------------------------
DROP VIEW IF EXISTS view_user_streaks;
DROP VIEW IF EXISTS view_document_user_statistics;
--------------------------------
--------- User Streaks ---------
--------------------------------
CREATE VIEW view_user_streaks AS
WITH document_windows AS (
SELECT
activity.user_id,
users.time_offset,
DATE(
activity.start_time,
users.time_offset,
'weekday 0', '-7 day'
) AS weekly_read,
DATE(activity.start_time, users.time_offset) AS daily_read
FROM activity
LEFT JOIN users ON users.id = activity.user_id
GROUP BY activity.user_id, weekly_read, daily_read
),
weekly_partitions AS (
SELECT
user_id,
time_offset,
'WEEK' AS "window",
weekly_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY weekly_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, weekly_read
),
daily_partitions AS (
SELECT
user_id,
time_offset,
'DAY' AS "window",
daily_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY daily_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, daily_read
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
time_offset
FROM daily_partitions
GROUP BY
time_offset,
user_id,
DATE(read_window, '+' || seqnum || ' day')
UNION ALL
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
time_offset
FROM weekly_partitions
GROUP BY
time_offset,
user_id,
DATE(read_window, '+' || (seqnum * 7) || ' day')
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date,
window,
user_id
FROM streaks
GROUP BY user_id, window
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date,
window,
user_id
FROM streaks
WHERE CASE
WHEN window = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN window = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
GROUP BY user_id, window
)
SELECT
max_streak.user_id,
max_streak.window,
IFNULL(max_streak, 0) AS max_streak,
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON
current_streak.user_id = max_streak.user_id
AND current_streak.window = max_streak.window;
--------------------------------
------- Document Stats ---------
--------------------------------
CREATE VIEW view_document_user_statistics AS
WITH intermediate_ga AS (
SELECT
ga1.id AS row_id,
ga1.user_id,
ga1.document_id,
ga1.duration,
ga1.start_time,
ga1.start_percentage,
ga1.end_percentage,
-- Find Overlapping Events (Assign Unique ID)
(
SELECT MIN(id)
FROM activity AS ga2
WHERE
ga1.document_id = ga2.document_id
AND ga1.user_id = ga2.user_id
AND ga1.start_percentage <= ga2.end_percentage
AND ga1.end_percentage >= ga2.start_percentage
) AS group_leader
FROM activity AS ga1
),
grouped_activity AS (
SELECT
user_id,
document_id,
MAX(start_time) AS start_time,
MIN(start_percentage) AS start_percentage,
MAX(end_percentage) AS end_percentage,
MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
SUM(duration) AS duration
FROM intermediate_ga
GROUP BY group_leader
),
current_progress AS (
SELECT
user_id,
document_id,
COALESCE((
SELECT percentage
FROM document_progress AS dp
WHERE
dp.user_id = iga.user_id
AND dp.document_id = iga.document_id
ORDER BY created_at DESC
LIMIT 1
), end_percentage) AS percentage
FROM intermediate_ga AS iga
GROUP BY user_id, document_id
HAVING MAX(start_time)
)
SELECT
ga.document_id,
ga.user_id,
cp.percentage,
MAX(start_time) AS last_read,
SUM(read_percentage) AS read_percentage,
-- All Time WPM
SUM(duration) AS total_time_seconds,
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
AS total_words_read,
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
/ (SUM(duration) / 60.0) AS total_wpm,
-- Yearly WPM
SUM(CASE WHEN start_time >= DATE('now', '-1 year') THEN duration ELSE 0 END)
AS yearly_time_seconds,
(
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
ELSE 0
END
)
)
AS yearly_words_read,
COALESCE((
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
END
)
)
/ (
SUM(
CASE
WHEN start_time >= DATE('now', '-1 year') THEN duration
END
)
/ 60.0
), 0.0)
AS yearly_wpm,
-- Monthly WPM
SUM(
CASE WHEN start_time >= DATE('now', '-1 month') THEN duration ELSE 0 END
)
AS monthly_time_seconds,
(
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
ELSE 0
END
)
)
AS monthly_words_read,
COALESCE((
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
END
)
)
/ (
SUM(
CASE
WHEN start_time >= DATE('now', '-1 month') THEN duration
END
)
/ 60.0
), 0.0)
AS monthly_wpm,
-- Weekly WPM
SUM(CASE WHEN start_time >= DATE('now', '-7 days') THEN duration ELSE 0 END)
AS weekly_time_seconds,
(
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
ELSE 0
END
)
)
AS weekly_words_read,
COALESCE((
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
END
)
)
/ (
SUM(
CASE
WHEN start_time >= DATE('now', '-7 days') THEN duration
END
)
/ 60.0
), 0.0)
AS weekly_wpm
FROM grouped_activity AS ga
INNER JOIN
current_progress AS cp
ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
INNER JOIN
documents AS d
ON ga.document_id = d.id
GROUP BY ga.document_id, ga.user_id
ORDER BY total_wpm DESC;
---------------------------------------------------------------
--------------------------- Triggers --------------------------
---------------------------------------------------------------
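The weekly_read expression in view_user_streaks above leans on SQLite's 'weekday 0' modifier, which advances a date forward to the next Sunday (a date already on Sunday is left unchanged) before '-7 day' steps back one week, so each activity row lands on a single week-start date. A small illustration with made-up dates, not taken from this changeset:

    -- 2024-01-03 is a Wednesday; it buckets to the preceding Sunday.
    SELECT DATE('2024-01-03', 'weekday 0', '-7 day');
    -- Result: 2023-12-31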

View File

@@ -1,298 +0,0 @@
---------------------------------------------------------------
---------------------------- Views ----------------------------
---------------------------------------------------------------
--------------------------------
--------- User Streaks ---------
--------------------------------
CREATE VIEW view_user_streaks AS
WITH document_windows AS (
SELECT
activity.user_id,
users.timezone,
DATE(
LOCAL_TIME(activity.start_time, users.timezone),
'weekday 0', '-7 day'
) AS weekly_read,
DATE(LOCAL_TIME(activity.start_time, users.timezone)) AS daily_read
FROM activity
LEFT JOIN users ON users.id = activity.user_id
GROUP BY activity.user_id, weekly_read, daily_read
),
weekly_partitions AS (
SELECT
user_id,
timezone,
'WEEK' AS "window",
weekly_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY weekly_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, weekly_read
),
daily_partitions AS (
SELECT
user_id,
timezone,
'DAY' AS "window",
daily_read AS read_window,
row_number() OVER (
PARTITION BY user_id ORDER BY daily_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, daily_read
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
timezone
FROM daily_partitions
GROUP BY
timezone,
user_id,
DATE(read_window, '+' || seqnum || ' day')
UNION ALL
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
timezone
FROM weekly_partitions
GROUP BY
timezone,
user_id,
DATE(read_window, '+' || (seqnum * 7) || ' day')
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date,
window,
user_id
FROM streaks
GROUP BY user_id, window
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date,
window,
user_id
FROM streaks
WHERE CASE
WHEN window = "WEEK" THEN
DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-14 day') = current_streak_end_date
OR DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-7 day') = current_streak_end_date
WHEN window = "DAY" THEN
DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), '-1 day') = current_streak_end_date
OR DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) = current_streak_end_date
END
GROUP BY user_id, window
)
SELECT
max_streak.user_id,
max_streak.window,
IFNULL(max_streak, 0) AS max_streak,
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON
current_streak.user_id = max_streak.user_id
AND current_streak.window = max_streak.window;
--------------------------------
------- Document Stats ---------
--------------------------------
CREATE VIEW view_document_user_statistics AS
WITH intermediate_ga AS (
SELECT
ga1.id AS row_id,
ga1.user_id,
ga1.document_id,
ga1.duration,
ga1.start_time,
ga1.start_percentage,
ga1.end_percentage,
-- Find Overlapping Events (Assign Unique ID)
(
SELECT MIN(id)
FROM activity AS ga2
WHERE
ga1.document_id = ga2.document_id
AND ga1.user_id = ga2.user_id
AND ga1.start_percentage <= ga2.end_percentage
AND ga1.end_percentage >= ga2.start_percentage
) AS group_leader
FROM activity AS ga1
),
grouped_activity AS (
SELECT
user_id,
document_id,
MAX(start_time) AS start_time,
MIN(start_percentage) AS start_percentage,
MAX(end_percentage) AS end_percentage,
MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
SUM(duration) AS duration
FROM intermediate_ga
GROUP BY group_leader
),
current_progress AS (
SELECT
user_id,
document_id,
COALESCE((
SELECT percentage
FROM document_progress AS dp
WHERE
dp.user_id = iga.user_id
AND dp.document_id = iga.document_id
ORDER BY created_at DESC
LIMIT 1
), end_percentage) AS percentage
FROM intermediate_ga AS iga
GROUP BY user_id, document_id
HAVING MAX(start_time)
)
SELECT
ga.document_id,
ga.user_id,
cp.percentage,
MAX(start_time) AS last_read,
SUM(read_percentage) AS read_percentage,
-- All Time WPM
SUM(duration) AS total_time_seconds,
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
AS total_words_read,
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
/ (SUM(duration) / 60.0) AS total_wpm,
-- Yearly WPM
SUM(CASE WHEN start_time >= DATE('now', '-1 year') THEN duration ELSE 0 END)
AS yearly_time_seconds,
(
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
ELSE 0
END
)
)
AS yearly_words_read,
COALESCE((
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
END
)
)
/ (
SUM(
CASE
WHEN start_time >= DATE('now', '-1 year') THEN duration
END
)
/ 60.0
), 0.0)
AS yearly_wpm,
-- Monthly WPM
SUM(
CASE WHEN start_time >= DATE('now', '-1 month') THEN duration ELSE 0 END
)
AS monthly_time_seconds,
(
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
ELSE 0
END
)
)
AS monthly_words_read,
COALESCE((
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
END
)
)
/ (
SUM(
CASE
WHEN start_time >= DATE('now', '-1 month') THEN duration
END
)
/ 60.0
), 0.0)
AS monthly_wpm,
-- Weekly WPM
SUM(CASE WHEN start_time >= DATE('now', '-7 days') THEN duration ELSE 0 END)
AS weekly_time_seconds,
(
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
ELSE 0
END
)
)
AS weekly_words_read,
COALESCE((
CAST(COALESCE(d.words, 0.0) AS REAL)
* SUM(
CASE
WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
END
)
)
/ (
SUM(
CASE
WHEN start_time >= DATE('now', '-7 days') THEN duration
END
)
/ 60.0
), 0.0)
AS weekly_wpm
FROM grouped_activity AS ga
INNER JOIN
current_progress AS cp
ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
INNER JOIN
documents AS d
ON ga.document_id = d.id
GROUP BY ga.document_id, ga.user_id
ORDER BY total_wpm DESC;

View File

@@ -120,7 +120,7 @@ sql:
        go_type:
          type: "string"
          pointer: true
-      - column: "users.timezone"
+      - column: "users.time_offset"
        go_type:
          type: "string"
          pointer: true

View File

@@ -9,13 +9,8 @@
      action="./users"
      class="flex flex-col gap-2 text-black dark:text-white text-sm">
      <input type="text"
-            id="operation"
-            name="operation"
-            value="CREATE"
-            class="hidden" />
-     <input type="text"
-            id="user"
-            name="user"
+            id="username"
+            name="username"
             placeholder="User"
             class="p-2 bg-gray-300 text-black dark:bg-gray-700 dark:text-white" />
      <input type="password"

View File

@@ -51,7 +51,7 @@
    {{ end }}
    </div>
    <div class="flex flex-col grow gap-2 p-4 rounded shadow-lg bg-white dark:bg-gray-700 text-gray-500 dark:text-white">
-       <p class="text-lg font-semibold mb-2">Change Timezone</p>
+       <p class="text-lg font-semibold mb-2">Change Time Offset</p>
        <form class="flex gap-4 flex-col lg:flex-row"
              action="./settings"
              method="POST">
@@ -60,11 +60,11 @@
            {{ template "svg/clock" (dict "Size" 15) }}
            </span>
            <select class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
-                   id="timezone"
-                   name="timezone">
-               {{ range $item := getTimeZones }}
-                   <option {{ if (eq $item $.Data.Timezone) }}selected{{ end }} value="{{ $item }}">
-                       {{ $item }}
+                   id="time_offset"
+                   name="time_offset">
+               {{ range $item := getUTCOffsets }}
+                   <option {{ if (eq $item.Value $.Data.TimeOffset) }}selected{{ end }} value="{{ $item.Value }}">
+                       {{ $item.Name }}
                </option>
                {{ end }}
            </select>