diff --git a/api/api.go b/api/api.go index 5a12110..278e792 100644 --- a/api/api.go +++ b/api/api.go @@ -228,7 +228,7 @@ func (api *API) generateTemplates() *multitemplate.Renderer { "dict": dict, "fields": fields, "getSVGGraphData": getSVGGraphData, - "getUTCOffsets": getUTCOffsets, + "getTimeZones": getTimeZones, "hasPrefix": strings.HasPrefix, "niceNumbers": niceNumbers, "niceSeconds": niceSeconds, diff --git a/api/app-routes.go b/api/app-routes.go index 079bd29..9a3d833 100644 --- a/api/app-routes.go +++ b/api/app-routes.go @@ -69,7 +69,7 @@ type requestDocumentIdentify struct { type requestSettingsEdit struct { Password *string `form:"password"` NewPassword *string `form:"new_password"` - TimeOffset *string `form:"time_offset"` + Timezone *string `form:"timezone"` } type requestDocumentAdd struct { @@ -299,8 +299,8 @@ func (api *API) appGetSettings(c *gin.Context) { } templateVars["Data"] = gin.H{ - "TimeOffset": *user.TimeOffset, - "Devices": devices, + "Timezone": *user.Timezone, + "Devices": devices, } c.HTML(http.StatusOK, "page/settings", templateVars) @@ -869,7 +869,7 @@ func (api *API) appEditSettings(c *gin.Context) { } // Validate Something Exists - if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.TimeOffset == nil { + if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.Timezone == nil { log.Error("Missing Form Values") appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values") return @@ -901,9 +901,9 @@ func (api *API) appEditSettings(c *gin.Context) { } // Set Time Offset - if rUserSettings.TimeOffset != nil { + if rUserSettings.Timezone != nil { templateVars["TimeOffsetMessage"] = "Time Offset Updated" - newUserSettings.TimeOffset = rUserSettings.TimeOffset + newUserSettings.Timezone = rUserSettings.Timezone } // Update User @@ -931,8 +931,8 @@ func (api *API) appEditSettings(c *gin.Context) { } templateVars["Data"] = gin.H{ - "TimeOffset": *user.TimeOffset, - 
"Devices": devices, + "Timezone": *user.Timezone, + "Devices": devices, } c.HTML(http.StatusOK, "page/settings", templateVars) diff --git a/api/utils.go b/api/utils.go index e918071..40f6e5f 100644 --- a/api/utils.go +++ b/api/utils.go @@ -13,54 +13,43 @@ import ( "reichard.io/antholume/metadata" ) -type UTCOffset struct { - Name string - Value string -} - -var UTC_OFFSETS = []UTCOffset{ - {Value: "-12 hours", Name: "UTC−12:00"}, - {Value: "-11 hours", Name: "UTC−11:00"}, - {Value: "-10 hours", Name: "UTC−10:00"}, - {Value: "-9.5 hours", Name: "UTC−09:30"}, - {Value: "-9 hours", Name: "UTC−09:00"}, - {Value: "-8 hours", Name: "UTC−08:00"}, - {Value: "-7 hours", Name: "UTC−07:00"}, - {Value: "-6 hours", Name: "UTC−06:00"}, - {Value: "-5 hours", Name: "UTC−05:00"}, - {Value: "-4 hours", Name: "UTC−04:00"}, - {Value: "-3.5 hours", Name: "UTC−03:30"}, - {Value: "-3 hours", Name: "UTC−03:00"}, - {Value: "-2 hours", Name: "UTC−02:00"}, - {Value: "-1 hours", Name: "UTC−01:00"}, - {Value: "0 hours", Name: "UTC±00:00"}, - {Value: "+1 hours", Name: "UTC+01:00"}, - {Value: "+2 hours", Name: "UTC+02:00"}, - {Value: "+3 hours", Name: "UTC+03:00"}, - {Value: "+3.5 hours", Name: "UTC+03:30"}, - {Value: "+4 hours", Name: "UTC+04:00"}, - {Value: "+4.5 hours", Name: "UTC+04:30"}, - {Value: "+5 hours", Name: "UTC+05:00"}, - {Value: "+5.5 hours", Name: "UTC+05:30"}, - {Value: "+5.75 hours", Name: "UTC+05:45"}, - {Value: "+6 hours", Name: "UTC+06:00"}, - {Value: "+6.5 hours", Name: "UTC+06:30"}, - {Value: "+7 hours", Name: "UTC+07:00"}, - {Value: "+8 hours", Name: "UTC+08:00"}, - {Value: "+8.75 hours", Name: "UTC+08:45"}, - {Value: "+9 hours", Name: "UTC+09:00"}, - {Value: "+9.5 hours", Name: "UTC+09:30"}, - {Value: "+10 hours", Name: "UTC+10:00"}, - {Value: "+10.5 hours", Name: "UTC+10:30"}, - {Value: "+11 hours", Name: "UTC+11:00"}, - {Value: "+12 hours", Name: "UTC+12:00"}, - {Value: "+12.75 hours", Name: "UTC+12:45"}, - {Value: "+13 hours", Name: "UTC+13:00"}, - {Value: "+14 
hours", Name: "UTC+14:00"}, -} - -func getUTCOffsets() []UTCOffset { - return UTC_OFFSETS +func getTimeZones() []string { + return []string{ + "Africa/Cairo", + "Africa/Johannesburg", + "Africa/Lagos", + "Africa/Nairobi", + "America/Adak", + "America/Anchorage", + "America/Buenos_Aires", + "America/Chicago", + "America/Denver", + "America/Los_Angeles", + "America/Mexico_City", + "America/New_York", + "America/Nuuk", + "America/Phoenix", + "America/Puerto_Rico", + "America/Sao_Paulo", + "America/St_Johns", + "America/Toronto", + "Asia/Dubai", + "Asia/Hong_Kong", + "Asia/Kolkata", + "Asia/Seoul", + "Asia/Shanghai", + "Asia/Singapore", + "Asia/Tokyo", + "Atlantic/Azores", + "Australia/Melbourne", + "Australia/Sydney", + "Europe/Berlin", + "Europe/London", + "Europe/Moscow", + "Europe/Paris", + "Pacific/Auckland", + "Pacific/Honolulu", + } } func niceSeconds(input int64) (result string) { diff --git a/database/manager.go b/database/manager.go index 9e41074..e2bb13b 100644 --- a/database/manager.go +++ b/database/manager.go @@ -3,15 +3,17 @@ package database import ( "context" "database/sql" + "database/sql/driver" "embed" _ "embed" + "errors" "fmt" "path/filepath" "time" "github.com/pressly/goose/v3" log "github.com/sirupsen/logrus" - _ "modernc.org/sqlite" + sqlite "modernc.org/sqlite" "reichard.io/antholume/config" _ "reichard.io/antholume/database/migrations" ) @@ -26,9 +28,21 @@ type DBManager struct { //go:embed schema.sql var ddl string +//go:embed views.sql +var views string + //go:embed migrations/* var migrations embed.FS +// Register scalar sqlite function on init +func init() { + sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{ + NArgs: 2, + Deterministic: true, + Scalar: localTime, + }) +} + // Returns an initialized manager func NewMgr(c *config.Config) *DBManager { // Create Manager @@ -90,6 +104,12 @@ func (dbm *DBManager) init() error { return err } + // Execute views + if _, err := dbm.DB.Exec(views); err != nil { + 
log.Errorf("Error executing views: %v", err) + return err + } + // Update settings err = dbm.updateSettings() if err != nil { @@ -182,6 +202,7 @@ func (dbm *DBManager) performMigrations(isNew bool) error { return goose.UpContext(ctx, dbm.DB, "migrations") } +// Determines whether the database is empty func isEmpty(db *sql.DB) (bool, error) { var tableCount int err := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table';").Scan(&tableCount) @@ -190,3 +211,28 @@ } return tableCount == 0, nil } + +// LOCAL_TIME custom SQL function +func localTime(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) { + timeStr, ok := args[0].(string) + if !ok { + return nil, errors.New("both arguments to LOCAL_TIME must be strings") + } + + timeZoneStr, ok := args[1].(string) + if !ok { + return nil, errors.New("both arguments to LOCAL_TIME must be strings") + } + + timeZone, err := time.LoadLocation(timeZoneStr) + if err != nil { + return nil, errors.New("unable to parse timezone") + } + + formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC) + if err != nil { + return nil, errors.New("unable to parse time") + } + + return formattedTime.In(timeZone).Format("2006-01-02 15:04:05.000"), nil +} diff --git a/database/migrations/20240311121111_user_timezone.go b/database/migrations/20240311121111_user_timezone.go new file mode 100644 index 0000000..027cd03 --- /dev/null +++ b/database/migrations/20240311121111_user_timezone.go @@ -0,0 +1,58 @@ +package migrations + +import ( + "context" + "database/sql" + + "github.com/pressly/goose/v3" +) + +func init() { + goose.AddMigrationContext(upUserTimezone, downUserTimezone) +} + +func upUserTimezone(ctx context.Context, tx *sql.Tx) error { + // Determine if we have a new DB or not + isNew := ctx.Value("isNew").(bool) + if isNew { + return nil + } + + // Copy table & create column + _, err := tx.Exec(` + -- Copy Table + CREATE TABLE temp_users AS SELECT * FROM 
users; + ALTER TABLE temp_users DROP COLUMN time_offset; + ALTER TABLE temp_users ADD COLUMN timezone TEXT; + UPDATE temp_users SET timezone = 'Europe/London'; + + -- Clean Table + DELETE FROM users; + ALTER TABLE users DROP COLUMN time_offset; + ALTER TABLE users ADD COLUMN timezone TEXT NOT NULL DEFAULT 'Europe/London'; + + -- Copy Temp Table -> Clean Table + INSERT INTO users SELECT * FROM temp_users; + + -- Drop Temp Table + DROP TABLE temp_users; + `) + if err != nil { + return err + } + + return nil +} + +func downUserTimezone(ctx context.Context, tx *sql.Tx) error { + // Update column name & value + _, err := tx.Exec(` + ALTER TABLE users RENAME COLUMN timezone TO time_offset; + UPDATE users SET time_offset = '0 hours'; + `) + if err != nil { + return err + } + + return nil +} diff --git a/database/models.go b/database/models.go index 33f8b11..30eee1d 100644 --- a/database/models.go +++ b/database/models.go @@ -99,12 +99,12 @@ type Setting struct { } type User struct { - ID string `json:"id"` - Pass *string `json:"-"` - AuthHash *string `json:"auth_hash"` - Admin bool `json:"-"` - TimeOffset *string `json:"time_offset"` - CreatedAt string `json:"created_at"` + ID string `json:"id"` + Pass *string `json:"-"` + AuthHash *string `json:"auth_hash"` + Admin bool `json:"-"` + Timezone *string `json:"timezone"` + CreatedAt string `json:"created_at"` } type UserStreak struct { @@ -117,34 +117,3 @@ type UserStreak struct { CurrentStreakStartDate string `json:"current_streak_start_date"` CurrentStreakEndDate string `json:"current_streak_end_date"` } - -type ViewDocumentUserStatistic struct { - DocumentID string `json:"document_id"` - UserID string `json:"user_id"` - Percentage float64 `json:"percentage"` - LastRead interface{} `json:"last_read"` - ReadPercentage *float64 `json:"read_percentage"` - TotalTimeSeconds *float64 `json:"total_time_seconds"` - TotalWordsRead interface{} `json:"total_words_read"` - TotalWpm int64 `json:"total_wpm"` - YearlyTimeSeconds *float64 
`json:"yearly_time_seconds"` - YearlyWordsRead interface{} `json:"yearly_words_read"` - YearlyWpm interface{} `json:"yearly_wpm"` - MonthlyTimeSeconds *float64 `json:"monthly_time_seconds"` - MonthlyWordsRead interface{} `json:"monthly_words_read"` - MonthlyWpm interface{} `json:"monthly_wpm"` - WeeklyTimeSeconds *float64 `json:"weekly_time_seconds"` - WeeklyWordsRead interface{} `json:"weekly_words_read"` - WeeklyWpm interface{} `json:"weekly_wpm"` -} - -type ViewUserStreak struct { - UserID string `json:"user_id"` - Window string `json:"window"` - MaxStreak interface{} `json:"max_streak"` - MaxStreakStartDate interface{} `json:"max_streak_start_date"` - MaxStreakEndDate interface{} `json:"max_streak_end_date"` - CurrentStreak interface{} `json:"current_streak"` - CurrentStreakStartDate interface{} `json:"current_streak_start_date"` - CurrentStreakEndDate interface{} `json:"current_streak_end_date"` -} diff --git a/database/query.sql b/database/query.sql index 407799e..5242e8d 100644 --- a/database/query.sql +++ b/database/query.sql @@ -64,7 +64,7 @@ WITH filtered_activity AS ( SELECT document_id, device_id, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time, + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(activity.start_time, users.timezone)) AS TEXT) AS start_time, title, author, duration, @@ -77,7 +77,7 @@ LEFT JOIN users ON users.id = activity.user_id; -- name: GetDailyReadStats :many WITH RECURSIVE last_30_days AS ( - SELECT DATE('now', time_offset) AS date + SELECT DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) AS date FROM users WHERE users.id = $user_id UNION ALL SELECT DATE(date, '-1 days') @@ -96,7 +96,7 @@ filtered_activity AS ( activity_days AS ( SELECT SUM(duration) AS seconds_read, - DATE(start_time, time_offset) AS day + DATE(LOCAL_TIME(start_time, timezone)) AS day FROM filtered_activity AS activity LEFT JOIN users ON users.id = activity.user_id GROUP BY day @@ -135,8 +135,8 @@ 
WHERE id = $device_id LIMIT 1; SELECT devices.id, devices.device_name, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.created_at, users.timezone)) AS TEXT) AS created_at, + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.last_synced, users.timezone)) AS TEXT) AS last_synced FROM devices JOIN users ON users.id = devices.user_id WHERE users.id = $user_id @@ -174,7 +174,7 @@ SELECT CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm, COALESCE(dus.read_percentage, 0) AS read_percentage, COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, - STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, "1970-01-01"), users.timezone)) AS last_read, ROUND(CAST(CASE WHEN dus.percentage IS NULL THEN 0.0 @@ -226,7 +226,7 @@ SELECT CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm, COALESCE(dus.read_percentage, 0) AS read_percentage, COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, - STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, "1970-01-01"), users.timezone)) AS last_read, ROUND(CAST(CASE WHEN dus.percentage IS NULL THEN 0.0 @@ -280,7 +280,7 @@ SELECT ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage, progress.document_id, progress.user_id, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, users.time_offset) AS TEXT) AS created_at + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(progress.created_at, users.timezone)) AS TEXT) AS created_at FROM document_progress AS progress LEFT JOIN users ON progress.user_id = users.id LEFT JOIN devices ON progress.device_id = devices.id @@ -369,7 +369,7 @@ UPDATE users SET pass = 
COALESCE($password, pass), auth_hash = COALESCE($auth_hash, auth_hash), - time_offset = COALESCE($time_offset, time_offset), + timezone = COALESCE($timezone, timezone), admin = COALESCE($admin, admin) WHERE id = $user_id RETURNING *; diff --git a/database/query.sql.go b/database/query.sql.go index 41998f6..4094d78 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -181,7 +181,7 @@ WITH filtered_activity AS ( SELECT document_id, device_id, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time, + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(activity.start_time, users.timezone)) AS TEXT) AS start_time, title, author, duration, @@ -254,7 +254,7 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Get const getDailyReadStats = `-- name: GetDailyReadStats :many WITH RECURSIVE last_30_days AS ( - SELECT DATE('now', time_offset) AS date + SELECT DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) AS date FROM users WHERE users.id = ?1 UNION ALL SELECT DATE(date, '-1 days') @@ -273,7 +273,7 @@ filtered_activity AS ( activity_days AS ( SELECT SUM(duration) AS seconds_read, - DATE(start_time, time_offset) AS day + DATE(LOCAL_TIME(start_time, timezone)) AS day FROM filtered_activity AS activity LEFT JOIN users ON users.id = activity.user_id GROUP BY day @@ -410,8 +410,8 @@ const getDevices = `-- name: GetDevices :many SELECT devices.id, devices.device_name, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.created_at, users.timezone)) AS TEXT) AS created_at, + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.last_synced, users.timezone)) AS TEXT) AS last_synced FROM devices JOIN users ON users.id = devices.user_id WHERE users.id = ?1 @@ -544,7 +544,7 @@ SELECT 
CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm, COALESCE(dus.read_percentage, 0) AS read_percentage, COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, - STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, "1970-01-01"), users.timezone)) AS last_read, ROUND(CAST(CASE WHEN dus.percentage IS NULL THEN 0.0 @@ -698,7 +698,9 @@ SELECT CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm, COALESCE(dus.read_percentage, 0) AS read_percentage, COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, - STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + + -- LOCAL_TIME(STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01")), users.timezone) + STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01")) AS last_read, ROUND(CAST(CASE WHEN dus.percentage IS NULL THEN 0.0 @@ -887,7 +889,7 @@ SELECT ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage, progress.document_id, progress.user_id, - CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, users.time_offset) AS TEXT) AS created_at + CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(progress.created_at, users.timezone)) AS TEXT) AS created_at FROM document_progress AS progress LEFT JOIN users ON progress.user_id = users.id LEFT JOIN devices ON progress.device_id = devices.id @@ -961,7 +963,7 @@ func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) ([]Get } const getUser = `-- name: GetUser :one -SELECT id, pass, auth_hash, admin, time_offset, created_at FROM users +SELECT id, pass, auth_hash, admin, timezone, created_at FROM users WHERE id = ?1 LIMIT 1 ` @@ -973,7 +975,7 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) { &i.Pass, &i.AuthHash, &i.Admin, - &i.TimeOffset, + &i.Timezone, &i.CreatedAt, ) return i, err @@ -1100,7 +1102,7 @@ func (q *Queries) GetUserStreaks(ctx context.Context, 
userID string) ([]UserStre } const getUsers = `-- name: GetUsers :many -SELECT id, pass, auth_hash, admin, time_offset, created_at FROM users +SELECT id, pass, auth_hash, admin, timezone, created_at FROM users ` func (q *Queries) GetUsers(ctx context.Context) ([]User, error) { @@ -1117,7 +1119,7 @@ func (q *Queries) GetUsers(ctx context.Context) ([]User, error) { &i.Pass, &i.AuthHash, &i.Admin, - &i.TimeOffset, + &i.Timezone, &i.CreatedAt, ); err != nil { return nil, err @@ -1251,25 +1253,25 @@ UPDATE users SET pass = COALESCE(?1, pass), auth_hash = COALESCE(?2, auth_hash), - time_offset = COALESCE(?3, time_offset), + timezone = COALESCE(?3, timezone), admin = COALESCE(?4, admin) WHERE id = ?5 -RETURNING id, pass, auth_hash, admin, time_offset, created_at +RETURNING id, pass, auth_hash, admin, timezone, created_at ` type UpdateUserParams struct { - Password *string `json:"-"` - AuthHash *string `json:"auth_hash"` - TimeOffset *string `json:"time_offset"` - Admin bool `json:"-"` - UserID string `json:"user_id"` + Password *string `json:"-"` + AuthHash *string `json:"auth_hash"` + Timezone *string `json:"timezone"` + Admin bool `json:"-"` + UserID string `json:"user_id"` } func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, error) { row := q.db.QueryRowContext(ctx, updateUser, arg.Password, arg.AuthHash, - arg.TimeOffset, + arg.Timezone, arg.Admin, arg.UserID, ) @@ -1279,7 +1281,7 @@ func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, e &i.Pass, &i.AuthHash, &i.Admin, - &i.TimeOffset, + &i.Timezone, &i.CreatedAt, ) return i, err diff --git a/database/schema.sql b/database/schema.sql index 2e8d0b9..fb70a39 100644 --- a/database/schema.sql +++ b/database/schema.sql @@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS users ( pass TEXT NOT NULL, auth_hash TEXT NOT NULL, admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)), - time_offset TEXT NOT NULL DEFAULT '0 hours', + timezone TEXT NOT NULL DEFAULT 'Europe/London', 
created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')) ); @@ -174,309 +174,9 @@ CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity ( document_id ); - ---------------------------------------------------------------- ----------------------------- Views ---------------------------- ---------------------------------------------------------------- - DROP VIEW IF EXISTS view_user_streaks; DROP VIEW IF EXISTS view_document_user_statistics; --------------------------------- ---------- User Streaks --------- --------------------------------- - -CREATE VIEW view_user_streaks AS - -WITH document_windows AS ( - SELECT - activity.user_id, - users.time_offset, - DATE( - activity.start_time, - users.time_offset, - 'weekday 0', '-7 day' - ) AS weekly_read, - DATE(activity.start_time, users.time_offset) AS daily_read - FROM activity - LEFT JOIN users ON users.id = activity.user_id - GROUP BY activity.user_id, weekly_read, daily_read -), -weekly_partitions AS ( - SELECT - user_id, - time_offset, - 'WEEK' AS "window", - weekly_read AS read_window, - row_number() OVER ( - PARTITION BY user_id ORDER BY weekly_read DESC - ) AS seqnum - FROM document_windows - GROUP BY user_id, weekly_read -), -daily_partitions AS ( - SELECT - user_id, - time_offset, - 'DAY' AS "window", - daily_read AS read_window, - row_number() OVER ( - PARTITION BY user_id ORDER BY daily_read DESC - ) AS seqnum - FROM document_windows - GROUP BY user_id, daily_read -), -streaks AS ( - SELECT - COUNT(*) AS streak, - MIN(read_window) AS start_date, - MAX(read_window) AS end_date, - window, - user_id, - time_offset - FROM daily_partitions - GROUP BY - time_offset, - user_id, - DATE(read_window, '+' || seqnum || ' day') - - UNION ALL - - SELECT - COUNT(*) AS streak, - MIN(read_window) AS start_date, - MAX(read_window) AS end_date, - window, - user_id, - time_offset - FROM weekly_partitions - GROUP BY - time_offset, - user_id, - DATE(read_window, '+' || (seqnum * 7) || ' day') -), 
-max_streak AS ( - SELECT - MAX(streak) AS max_streak, - start_date AS max_streak_start_date, - end_date AS max_streak_end_date, - window, - user_id - FROM streaks - GROUP BY user_id, window -), -current_streak AS ( - SELECT - streak AS current_streak, - start_date AS current_streak_start_date, - end_date AS current_streak_end_date, - window, - user_id - FROM streaks - WHERE CASE - WHEN window = "WEEK" THEN - DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date - OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date - WHEN window = "DAY" THEN - DATE('now', time_offset, '-1 day') = current_streak_end_date - OR DATE('now', time_offset) = current_streak_end_date - END - GROUP BY user_id, window -) -SELECT - max_streak.user_id, - max_streak.window, - IFNULL(max_streak, 0) AS max_streak, - IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date, - IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date, - IFNULL(current_streak, 0) AS current_streak, - IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date, - IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date -FROM max_streak -LEFT JOIN current_streak ON - current_streak.user_id = max_streak.user_id - AND current_streak.window = max_streak.window; - --------------------------------- -------- Document Stats --------- --------------------------------- - -CREATE VIEW view_document_user_statistics AS - -WITH intermediate_ga AS ( - SELECT - ga1.id AS row_id, - ga1.user_id, - ga1.document_id, - ga1.duration, - ga1.start_time, - ga1.start_percentage, - ga1.end_percentage, - - -- Find Overlapping Events (Assign Unique ID) - ( - SELECT MIN(id) - FROM activity AS ga2 - WHERE - ga1.document_id = ga2.document_id - AND ga1.user_id = ga2.user_id - AND ga1.start_percentage <= ga2.end_percentage - AND ga1.end_percentage >= ga2.start_percentage - ) AS group_leader - FROM activity AS ga1 -), - -grouped_activity AS ( - SELECT - user_id, - document_id, - 
MAX(start_time) AS start_time, - MIN(start_percentage) AS start_percentage, - MAX(end_percentage) AS end_percentage, - MAX(end_percentage) - MIN(start_percentage) AS read_percentage, - SUM(duration) AS duration - FROM intermediate_ga - GROUP BY group_leader -), - -current_progress AS ( - SELECT - user_id, - document_id, - COALESCE(( - SELECT percentage - FROM document_progress AS dp - WHERE - dp.user_id = iga.user_id - AND dp.document_id = iga.document_id - ORDER BY created_at DESC - LIMIT 1 - ), end_percentage) AS percentage - FROM intermediate_ga AS iga - GROUP BY user_id, document_id - HAVING MAX(start_time) -) - -SELECT - ga.document_id, - ga.user_id, - cp.percentage, - MAX(start_time) AS last_read, - SUM(read_percentage) AS read_percentage, - - -- All Time WPM - SUM(duration) AS total_time_seconds, - (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage)) - AS total_words_read, - (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage)) - / (SUM(duration) / 60.0) AS total_wpm, - - -- Yearly WPM - SUM(CASE WHEN start_time >= DATE('now', '-1 year') THEN duration ELSE 0 END) - AS yearly_time_seconds, - ( - CAST(COALESCE(d.words, 0.0) AS REAL) - * SUM( - CASE - WHEN start_time >= DATE('now', '-1 year') THEN read_percentage - ELSE 0 - END - ) - ) - AS yearly_words_read, - COALESCE(( - CAST(COALESCE(d.words, 0.0) AS REAL) - * SUM( - CASE - WHEN start_time >= DATE('now', '-1 year') THEN read_percentage - END - ) - ) - / ( - SUM( - CASE - WHEN start_time >= DATE('now', '-1 year') THEN duration - END - ) - / 60.0 - ), 0.0) - AS yearly_wpm, - - -- Monthly WPM - SUM( - CASE WHEN start_time >= DATE('now', '-1 month') THEN duration ELSE 0 END - ) - AS monthly_time_seconds, - ( - CAST(COALESCE(d.words, 0.0) AS REAL) - * SUM( - CASE - WHEN start_time >= DATE('now', '-1 month') THEN read_percentage - ELSE 0 - END - ) - ) - AS monthly_words_read, - COALESCE(( - CAST(COALESCE(d.words, 0.0) AS REAL) - * SUM( - CASE - WHEN start_time >= DATE('now', '-1 month') THEN 
read_percentage - END - ) - ) - / ( - SUM( - CASE - WHEN start_time >= DATE('now', '-1 month') THEN duration - END - ) - / 60.0 - ), 0.0) - AS monthly_wpm, - - -- Weekly WPM - SUM(CASE WHEN start_time >= DATE('now', '-7 days') THEN duration ELSE 0 END) - AS weekly_time_seconds, - ( - CAST(COALESCE(d.words, 0.0) AS REAL) - * SUM( - CASE - WHEN start_time >= DATE('now', '-7 days') THEN read_percentage - ELSE 0 - END - ) - ) - AS weekly_words_read, - COALESCE(( - CAST(COALESCE(d.words, 0.0) AS REAL) - * SUM( - CASE - WHEN start_time >= DATE('now', '-7 days') THEN read_percentage - END - ) - ) - / ( - SUM( - CASE - WHEN start_time >= DATE('now', '-7 days') THEN duration - END - ) - / 60.0 - ), 0.0) - AS weekly_wpm - -FROM grouped_activity AS ga -INNER JOIN - current_progress AS cp - ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id -INNER JOIN - documents AS d - ON ga.document_id = d.id -GROUP BY ga.document_id, ga.user_id -ORDER BY total_wpm DESC; - - --------------------------------------------------------------- --------------------------- Triggers -------------------------- --------------------------------------------------------------- diff --git a/database/views.sql b/database/views.sql new file mode 100644 index 0000000..3e95fb3 --- /dev/null +++ b/database/views.sql @@ -0,0 +1,298 @@ +--------------------------------------------------------------- +---------------------------- Views ---------------------------- +--------------------------------------------------------------- + +-------------------------------- +--------- User Streaks --------- +-------------------------------- + +CREATE VIEW view_user_streaks AS + +WITH document_windows AS ( + SELECT + activity.user_id, + users.timezone, + DATE( + LOCAL_TIME(activity.start_time, users.timezone), + 'weekday 0', '-7 day' + ) AS weekly_read, + DATE(LOCAL_TIME(activity.start_time, users.timezone)) AS daily_read + FROM activity + LEFT JOIN users ON users.id = activity.user_id + GROUP BY 
activity.user_id, weekly_read, daily_read +), +weekly_partitions AS ( + SELECT + user_id, + timezone, + 'WEEK' AS "window", + weekly_read AS read_window, + row_number() OVER ( + PARTITION BY user_id ORDER BY weekly_read DESC + ) AS seqnum + FROM document_windows + GROUP BY user_id, weekly_read +), +daily_partitions AS ( + SELECT + user_id, + timezone, + 'DAY' AS "window", + daily_read AS read_window, + row_number() OVER ( + PARTITION BY user_id ORDER BY daily_read DESC + ) AS seqnum + FROM document_windows + GROUP BY user_id, daily_read +), +streaks AS ( + SELECT + COUNT(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date, + window, + user_id, + timezone + FROM daily_partitions + GROUP BY + timezone, + user_id, + DATE(read_window, '+' || seqnum || ' day') + + UNION ALL + + SELECT + COUNT(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date, + window, + user_id, + timezone + FROM weekly_partitions + GROUP BY + timezone, + user_id, + DATE(read_window, '+' || (seqnum * 7) || ' day') +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date, + window, + user_id + FROM streaks + GROUP BY user_id, window +), +current_streak AS ( + SELECT + streak AS current_streak, + start_date AS current_streak_start_date, + end_date AS current_streak_end_date, + window, + user_id + FROM streaks + WHERE CASE + WHEN window = "WEEK" THEN + DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-14 day') = current_streak_end_date + OR DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-7 day') = current_streak_end_date + WHEN window = "DAY" THEN + DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), '-1 day') = current_streak_end_date + OR DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) = current_streak_end_date + END + GROUP BY user_id, window +) +SELECT + max_streak.user_id, + 
max_streak.window, + IFNULL(max_streak, 0) AS max_streak, + IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date, + IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date, + IFNULL(current_streak, 0) AS current_streak, + IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date, + IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date +FROM max_streak +LEFT JOIN current_streak ON + current_streak.user_id = max_streak.user_id + AND current_streak.window = max_streak.window; + +-------------------------------- +------- Document Stats --------- +-------------------------------- + +CREATE VIEW view_document_user_statistics AS + +WITH intermediate_ga AS ( + SELECT + ga1.id AS row_id, + ga1.user_id, + ga1.document_id, + ga1.duration, + ga1.start_time, + ga1.start_percentage, + ga1.end_percentage, + + -- Find Overlapping Events (Assign Unique ID) + ( + SELECT MIN(id) + FROM activity AS ga2 + WHERE + ga1.document_id = ga2.document_id + AND ga1.user_id = ga2.user_id + AND ga1.start_percentage <= ga2.end_percentage + AND ga1.end_percentage >= ga2.start_percentage + ) AS group_leader + FROM activity AS ga1 +), + +grouped_activity AS ( + SELECT + user_id, + document_id, + MAX(start_time) AS start_time, + MIN(start_percentage) AS start_percentage, + MAX(end_percentage) AS end_percentage, + MAX(end_percentage) - MIN(start_percentage) AS read_percentage, + SUM(duration) AS duration + FROM intermediate_ga + GROUP BY group_leader +), + +current_progress AS ( + SELECT + user_id, + document_id, + COALESCE(( + SELECT percentage + FROM document_progress AS dp + WHERE + dp.user_id = iga.user_id + AND dp.document_id = iga.document_id + ORDER BY created_at DESC + LIMIT 1 + ), end_percentage) AS percentage + FROM intermediate_ga AS iga + GROUP BY user_id, document_id + HAVING MAX(start_time) +) + +SELECT + ga.document_id, + ga.user_id, + cp.percentage, + MAX(start_time) AS last_read, + SUM(read_percentage) AS read_percentage, + + -- All Time WPM + 
SUM(duration) AS total_time_seconds, + (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage)) + AS total_words_read, + (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage)) + / (SUM(duration) / 60.0) AS total_wpm, + + -- Yearly WPM + SUM(CASE WHEN start_time >= DATE('now', '-1 year') THEN duration ELSE 0 END) + AS yearly_time_seconds, + ( + CAST(COALESCE(d.words, 0.0) AS REAL) + * SUM( + CASE + WHEN start_time >= DATE('now', '-1 year') THEN read_percentage + ELSE 0 + END + ) + ) + AS yearly_words_read, + COALESCE(( + CAST(COALESCE(d.words, 0.0) AS REAL) + * SUM( + CASE + WHEN start_time >= DATE('now', '-1 year') THEN read_percentage + END + ) + ) + / ( + SUM( + CASE + WHEN start_time >= DATE('now', '-1 year') THEN duration + END + ) + / 60.0 + ), 0.0) + AS yearly_wpm, + + -- Monthly WPM + SUM( + CASE WHEN start_time >= DATE('now', '-1 month') THEN duration ELSE 0 END + ) + AS monthly_time_seconds, + ( + CAST(COALESCE(d.words, 0.0) AS REAL) + * SUM( + CASE + WHEN start_time >= DATE('now', '-1 month') THEN read_percentage + ELSE 0 + END + ) + ) + AS monthly_words_read, + COALESCE(( + CAST(COALESCE(d.words, 0.0) AS REAL) + * SUM( + CASE + WHEN start_time >= DATE('now', '-1 month') THEN read_percentage + END + ) + ) + / ( + SUM( + CASE + WHEN start_time >= DATE('now', '-1 month') THEN duration + END + ) + / 60.0 + ), 0.0) + AS monthly_wpm, + + -- Weekly WPM + SUM(CASE WHEN start_time >= DATE('now', '-7 days') THEN duration ELSE 0 END) + AS weekly_time_seconds, + ( + CAST(COALESCE(d.words, 0.0) AS REAL) + * SUM( + CASE + WHEN start_time >= DATE('now', '-7 days') THEN read_percentage + ELSE 0 + END + ) + ) + AS weekly_words_read, + COALESCE(( + CAST(COALESCE(d.words, 0.0) AS REAL) + * SUM( + CASE + WHEN start_time >= DATE('now', '-7 days') THEN read_percentage + END + ) + ) + / ( + SUM( + CASE + WHEN start_time >= DATE('now', '-7 days') THEN duration + END + ) + / 60.0 + ), 0.0) + AS weekly_wpm + +FROM grouped_activity AS ga +INNER JOIN + current_progress 
AS cp + ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id +INNER JOIN + documents AS d + ON ga.document_id = d.id +GROUP BY ga.document_id, ga.user_id +ORDER BY total_wpm DESC; + + diff --git a/sqlc.yaml b/sqlc.yaml index 9f6b8a5..f6c39ee 100644 --- a/sqlc.yaml +++ b/sqlc.yaml @@ -120,7 +120,7 @@ sql: go_type: type: "string" pointer: true - - column: "users.time_offset" + - column: "users.timezone" go_type: type: "string" pointer: true diff --git a/templates/pages/settings.tmpl b/templates/pages/settings.tmpl index 522cf13..c06c8ee 100644 --- a/templates/pages/settings.tmpl +++ b/templates/pages/settings.tmpl @@ -51,7 +51,7 @@ {{ end }}
-

Change Time Offset

+

Change Timezone

@@ -60,11 +60,11 @@ {{ template "svg/clock" (dict "Size" 15) }}