Compare commits
2 commits (8e81acd381 ... 2d206826d6):
  - 2d206826d6
  - f1414e3e4e
@@ -157,6 +157,7 @@ func (api *API) registerWebAppRoutes(router *gin.Engine) {
     router.GET("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminImport)
     router.POST("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminImport)
     router.GET("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminUsers)
+    router.POST("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appUpdateAdminUsers)
     router.GET("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdmin)
     router.POST("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminAction)
     router.POST("/login", api.appAuthLogin)
@@ -228,7 +229,7 @@ func (api *API) generateTemplates() *multitemplate.Renderer {
         "dict":            dict,
         "fields":          fields,
         "getSVGGraphData": getSVGGraphData,
-        "getUTCOffsets":   getUTCOffsets,
+        "getTimeZones":    getTimeZones,
         "hasPrefix":       strings.HasPrefix,
         "niceNumbers":     niceNumbers,
         "niceSeconds":     niceSeconds,
@@ -54,6 +54,20 @@ type requestAdminImport struct {
     Type importType `form:"type"`
 }
 
+type operationType string
+
+const (
+    opUpdate operationType = "UPDATE"
+    opCreate operationType = "CREATE"
+)
+
+type requestAdminUpdateUser struct {
+    User      string        `form:"user"`
+    Password  string        `form:"password"`
+    isAdmin   bool          `form:"is_admin"`
+    Operation operationType `form:"operation"`
+}
+
 type requestAdminLogs struct {
     Filter string `form:"filter"`
 }
@@ -225,6 +239,45 @@ func (api *API) appGetAdminUsers(c *gin.Context) {
     c.HTML(http.StatusOK, "page/admin-users", templateVars)
 }
 
+func (api *API) appUpdateAdminUsers(c *gin.Context) {
+    templateVars, _ := api.getBaseTemplateVars("admin-users", c)
+
+    var rAdminUserUpdate requestAdminUpdateUser
+    if err := c.ShouldBind(&rAdminUserUpdate); err != nil {
+        log.Error("Invalid URI Bind")
+        appErrorPage(c, http.StatusNotFound, "Invalid user update")
+        return
+    }
+
+    var err error
+    switch rAdminUserUpdate.Operation {
+    case opCreate:
+        err = api.createUser(rAdminUserUpdate.User, rAdminUserUpdate.Password)
+    case opUpdate:
+        err = fmt.Errorf("unimplemented")
+    default:
+        appErrorPage(c, http.StatusNotFound, "Unknown user operation")
+        return
+    }
+
+    if err != nil {
+        appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Unable to create user: %v", err))
+        return
+    }
+
+    users, err := api.db.Queries.GetUsers(api.db.Ctx)
+    if err != nil {
+        log.Error("GetUsers DB Error: ", err)
+        appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
+        return
+    }
+
+    templateVars["Data"] = users
+
+    c.HTML(http.StatusOK, "page/admin-users", templateVars)
+}
+
 func (api *API) appGetAdminImport(c *gin.Context) {
     templateVars, _ := api.getBaseTemplateVars("admin-import", c)
 
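The new handler binds the admin-users form into requestAdminUpdateUser. The sketch below is only a rough illustration of that form contract, not code from this change: the field names come from the struct tags above, while the values and the assumption that the request goes to /admin/users behind the auth and admin middleware are filled in for the example.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Field names match the form tags on requestAdminUpdateUser; the values
	// here are made up for illustration.
	form := url.Values{}
	form.Set("operation", "CREATE") // opCreate in the switch above
	form.Set("user", "alice")
	form.Set("password", "example-password")

	// This body would be POSTed to /admin/users as
	// application/x-www-form-urlencoded by the admin-users template.
	fmt.Println(form.Encode())
}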
@@ -69,7 +69,7 @@ type requestDocumentIdentify struct {
 type requestSettingsEdit struct {
     Password    *string `form:"password"`
     NewPassword *string `form:"new_password"`
-    TimeOffset  *string `form:"time_offset"`
+    Timezone    *string `form:"timezone"`
 }
 
 type requestDocumentAdd struct {
@@ -299,7 +299,7 @@ func (api *API) appGetSettings(c *gin.Context) {
     }
 
     templateVars["Data"] = gin.H{
-        "TimeOffset": *user.TimeOffset,
+        "Timezone": *user.Timezone,
         "Devices":    devices,
     }
 
@@ -869,7 +869,7 @@ func (api *API) appEditSettings(c *gin.Context) {
     }
 
     // Validate Something Exists
-    if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.TimeOffset == nil {
+    if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.Timezone == nil {
         log.Error("Missing Form Values")
         appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
         return
@@ -901,9 +901,9 @@ func (api *API) appEditSettings(c *gin.Context) {
     }
 
     // Set Time Offset
-    if rUserSettings.TimeOffset != nil {
+    if rUserSettings.Timezone != nil {
         templateVars["TimeOffsetMessage"] = "Time Offset Updated"
-        newUserSettings.TimeOffset = rUserSettings.TimeOffset
+        newUserSettings.Timezone = rUserSettings.Timezone
     }
 
     // Update User
@@ -931,7 +931,7 @@ func (api *API) appEditSettings(c *gin.Context) {
     }
 
     templateVars["Data"] = gin.H{
-        "TimeOffset": *user.TimeOffset,
+        "Timezone": *user.Timezone,
         "Devices":    devices,
     }
 
api/auth.go (52)

@@ -460,3 +460,55 @@ func (api *API) rotateAllAuthHashes() error {
 
     return nil
 }
+
+func (api *API) createUser(username string, rawPassword string) error {
+    password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
+
+    if username == "" {
+        return fmt.Errorf("username can't be empty")
+    }
+
+    if rawPassword == "" {
+        return fmt.Errorf("password can't be empty")
+    }
+
+    hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
+    if err != nil {
+        return fmt.Errorf("unable to create hashed password")
+    }
+
+    // Generate auth hash
+    rawAuthHash, err := utils.GenerateToken(64)
+    if err != nil {
+        return fmt.Errorf("unable to create token for user")
+    }
+
+    // Get current users
+    currentUsers, err := api.db.Queries.GetUsers(api.db.Ctx)
+    if err != nil {
+        return fmt.Errorf("unable to get current users")
+    }
+
+    // Determine if we should be admin
+    isAdmin := false
+    if len(currentUsers) == 0 {
+        isAdmin = true
+    }
+
+    // Create user in DB
+    authHash := fmt.Sprintf("%x", rawAuthHash)
+    if rows, err := api.db.Queries.CreateUser(api.db.Ctx, database.CreateUserParams{
+        ID:       username,
+        Pass:     &hashedPassword,
+        AuthHash: &authHash,
+        Admin:    isAdmin,
+    }); err != nil {
+        log.Error("CreateUser DB Error:", err)
+        return fmt.Errorf("unable to create user")
+    } else if rows == 0 {
+        log.Warn("User Already Exists:", username)
+        return fmt.Errorf("user already exists")
+    }
+
+    return nil
+}
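Note that createUser MD5-hex-encodes the raw password before Argon2-hashing it, so any later verification has to apply the same pre-hash. Assuming the aliased argon2 import is github.com/alexedwards/argon2id (whose CreateHash and DefaultParams this call matches), a matching check would look roughly like the sketch below; it is not part of this commit and the sample password is made up.

package main

import (
	"crypto/md5"
	"fmt"

	argon2 "github.com/alexedwards/argon2id"
)

func main() {
	// Mirror createUser: MD5-hex the raw password, then Argon2id-hash it.
	raw := "example-password"
	pre := fmt.Sprintf("%x", md5.Sum([]byte(raw)))

	hash, err := argon2.CreateHash(pre, argon2.DefaultParams)
	if err != nil {
		panic(err)
	}

	// Verification must apply the same MD5 pre-hash before comparing
	// against the hash stored in users.pass.
	match, err := argon2.ComparePasswordAndHash(pre, hash)
	if err != nil {
		panic(err)
	}
	fmt.Println("match:", match) // true
}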
api/utils.go (83)

@@ -13,54 +13,43 @@ import (
     "reichard.io/antholume/metadata"
 )
 
-type UTCOffset struct {
-    Name  string
-    Value string
-}
-
-var UTC_OFFSETS = []UTCOffset{
-    {Value: "-12 hours", Name: "UTC−12:00"},
-    {Value: "-11 hours", Name: "UTC−11:00"},
-    {Value: "-10 hours", Name: "UTC−10:00"},
-    {Value: "-9.5 hours", Name: "UTC−09:30"},
-    {Value: "-9 hours", Name: "UTC−09:00"},
-    {Value: "-8 hours", Name: "UTC−08:00"},
-    {Value: "-7 hours", Name: "UTC−07:00"},
-    {Value: "-6 hours", Name: "UTC−06:00"},
-    {Value: "-5 hours", Name: "UTC−05:00"},
-    {Value: "-4 hours", Name: "UTC−04:00"},
-    {Value: "-3.5 hours", Name: "UTC−03:30"},
-    {Value: "-3 hours", Name: "UTC−03:00"},
-    {Value: "-2 hours", Name: "UTC−02:00"},
-    {Value: "-1 hours", Name: "UTC−01:00"},
-    {Value: "0 hours", Name: "UTC±00:00"},
-    {Value: "+1 hours", Name: "UTC+01:00"},
-    {Value: "+2 hours", Name: "UTC+02:00"},
-    {Value: "+3 hours", Name: "UTC+03:00"},
-    {Value: "+3.5 hours", Name: "UTC+03:30"},
-    {Value: "+4 hours", Name: "UTC+04:00"},
-    {Value: "+4.5 hours", Name: "UTC+04:30"},
-    {Value: "+5 hours", Name: "UTC+05:00"},
-    {Value: "+5.5 hours", Name: "UTC+05:30"},
-    {Value: "+5.75 hours", Name: "UTC+05:45"},
-    {Value: "+6 hours", Name: "UTC+06:00"},
-    {Value: "+6.5 hours", Name: "UTC+06:30"},
-    {Value: "+7 hours", Name: "UTC+07:00"},
-    {Value: "+8 hours", Name: "UTC+08:00"},
-    {Value: "+8.75 hours", Name: "UTC+08:45"},
-    {Value: "+9 hours", Name: "UTC+09:00"},
-    {Value: "+9.5 hours", Name: "UTC+09:30"},
-    {Value: "+10 hours", Name: "UTC+10:00"},
-    {Value: "+10.5 hours", Name: "UTC+10:30"},
-    {Value: "+11 hours", Name: "UTC+11:00"},
-    {Value: "+12 hours", Name: "UTC+12:00"},
-    {Value: "+12.75 hours", Name: "UTC+12:45"},
-    {Value: "+13 hours", Name: "UTC+13:00"},
-    {Value: "+14 hours", Name: "UTC+14:00"},
-}
-
-func getUTCOffsets() []UTCOffset {
-    return UTC_OFFSETS
+func getTimeZones() []string {
+    return []string{
+        "Africa/Cairo",
+        "Africa/Johannesburg",
+        "Africa/Lagos",
+        "Africa/Nairobi",
+        "America/Adak",
+        "America/Anchorage",
+        "America/Buenos_Aires",
+        "America/Chicago",
+        "America/Denver",
+        "America/Los_Angeles",
+        "America/Mexico_City",
+        "America/New_York",
+        "America/Nuuk",
+        "America/Phoenix",
+        "America/Puerto_Rico",
+        "America/Sao_Paulo",
+        "America/St_Johns",
+        "America/Toronto",
+        "Asia/Dubai",
+        "Asia/Hong_Kong",
+        "Asia/Kolkata",
+        "Asia/Seoul",
+        "Asia/Shanghai",
+        "Asia/Singapore",
+        "Asia/Tokyo",
+        "Atlantic/Azores",
+        "Australia/Melbourne",
+        "Australia/Sydney",
+        "Europe/Berlin",
+        "Europe/London",
+        "Europe/Moscow",
+        "Europe/Paris",
+        "Pacific/Auckland",
+        "Pacific/Honolulu",
+    }
 }
 
 func niceSeconds(input int64) (result string) {
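Each of these names must be a valid IANA zone for time.LoadLocation, which is how the LOCAL_TIME SQL function further down in this diff resolves them. A small hedged check, not part of the commit, could look like the following; the zones listed here are just a subset for illustration, and it relies on a tz database being available on the host (or the time/tzdata embed).

package main

import (
	"fmt"
	"time"
)

// Assumed subset of the list above; in the real code the slice would come
// from getTimeZones.
var zones = []string{"Europe/London", "America/New_York", "Asia/Tokyo"}

func main() {
	for _, name := range zones {
		// LOCAL_TIME calls time.LoadLocation with exactly these strings,
		// so every entry has to resolve.
		if _, err := time.LoadLocation(name); err != nil {
			fmt.Printf("invalid timezone %q: %v\n", name, err)
			continue
		}
		fmt.Printf("%s OK\n", name)
	}
}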
@@ -3,15 +3,17 @@ package database
 import (
     "context"
     "database/sql"
+    "database/sql/driver"
     "embed"
     _ "embed"
+    "errors"
     "fmt"
     "path/filepath"
     "time"
 
     "github.com/pressly/goose/v3"
     log "github.com/sirupsen/logrus"
-    _ "modernc.org/sqlite"
+    sqlite "modernc.org/sqlite"
     "reichard.io/antholume/config"
     _ "reichard.io/antholume/database/migrations"
 )
@@ -26,9 +28,21 @@ type DBManager struct {
 //go:embed schema.sql
 var ddl string
 
+//go:embed views.sql
+var views string
+
 //go:embed migrations/*
 var migrations embed.FS
 
+// Register scalar sqlite function on init
+func init() {
+    sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{
+        NArgs:         2,
+        Deterministic: true,
+        Scalar:        localTime,
+    })
+}
+
 // Returns an initialized manager
 func NewMgr(c *config.Config) *DBManager {
     // Create Manager
@@ -90,6 +104,12 @@ func (dbm *DBManager) init() error {
         return err
     }
 
+    // Execute views
+    if _, err := dbm.DB.Exec(views, nil); err != nil {
+        log.Panicf("Error executing views: %v", err)
+        return err
+    }
+
     // Update settings
     err = dbm.updateSettings()
     if err != nil {
@@ -182,6 +202,7 @@ func (dbm *DBManager) performMigrations(isNew bool) error {
     return goose.UpContext(ctx, dbm.DB, "migrations")
 }
 
+// Determines whether the database is empty
 func isEmpty(db *sql.DB) (bool, error) {
     var tableCount int
     err := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table';").Scan(&tableCount)
@@ -190,3 +211,28 @@ func isEmpty(db *sql.DB) (bool, error) {
     }
     return tableCount == 0, nil
 }
+
+// LOCAL_TIME custom SQL function
+func localTime(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
+    timeStr, ok := args[0].(string)
+    if !ok {
+        return nil, errors.New("both arguments to TZTime must be strings")
+    }
+
+    timeZoneStr, ok := args[1].(string)
+    if !ok {
+        return nil, errors.New("both arguments to TZTime must be strings")
+    }
+
+    timeZone, err := time.LoadLocation(timeZoneStr)
+    if err != nil {
+        return nil, errors.New("unable to parse timezone")
+    }
+
+    formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
+    if err != nil {
+        return nil, errors.New("unable to parse time")
+    }
+
+    return formattedTime.In(timeZone).Format("2006-01-02 15:04:05.000"), nil
+}
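The registered LOCAL_TIME function expects an RFC3339 UTC timestamp plus an IANA zone name and returns a "2006-01-02 15:04:05.000" string in that zone. The sketch below is not part of this commit: it re-registers an equivalent function so it runs standalone against modernc.org/sqlite, and the sample timestamp and zone are made up.

package main

import (
	"database/sql"
	"database/sql/driver"
	"fmt"
	"time"

	sqlite "modernc.org/sqlite"
)

// Register a LOCAL_TIME equivalent to the one in the diff so the query below
// works on its own (in the real code this happens in the database package's
// init).
func init() {
	sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{
		NArgs:         2,
		Deterministic: true,
		Scalar: func(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
			t, err := time.ParseInLocation(time.RFC3339, args[0].(string), time.UTC)
			if err != nil {
				return nil, err
			}
			loc, err := time.LoadLocation(args[1].(string))
			if err != nil {
				return nil, err
			}
			return t.In(loc).Format("2006-01-02 15:04:05.000"), nil
		},
	})
}

func main() {
	db, err := sql.Open("sqlite", ":memory:")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	var local string
	// Example values only: converts a UTC instant to America/New_York.
	err = db.QueryRow(`SELECT LOCAL_TIME(?, ?)`, "2024-03-11T12:11:11Z", "America/New_York").Scan(&local)
	if err != nil {
		panic(err)
	}
	fmt.Println(local) // expected: 2024-03-11 08:11:11.000
}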
database/migrations/20240311121111_user_timezone.go (new file, 58)

@@ -0,0 +1,58 @@
+package migrations
+
+import (
+    "context"
+    "database/sql"
+
+    "github.com/pressly/goose/v3"
+)
+
+func init() {
+    goose.AddMigrationContext(upUserTimezone, downUserTimezone)
+}
+
+func upUserTimezone(ctx context.Context, tx *sql.Tx) error {
+    // Determine if we have a new DB or not
+    isNew := ctx.Value("isNew").(bool)
+    if isNew {
+        return nil
+    }
+
+    // Copy table & create column
+    _, err := tx.Exec(`
+        -- Copy Table
+        CREATE TABLE temp_users AS SELECT * FROM users;
+        ALTER TABLE temp_users DROP COLUMN time_offset;
+        ALTER TABLE temp_users ADD COLUMN timezone TEXT;
+        UPDATE temp_users SET timezone = 'Europe/London';
+
+        -- Clean Table
+        DELETE FROM users;
+        ALTER TABLE users DROP COLUMN time_offset;
+        ALTER TABLE users ADD COLUMN timezone TEXT NOT NULL DEFAULT 'Europe/London';
+
+        -- Copy Temp Table -> Clean Table
+        INSERT INTO users SELECT * FROM temp_users;
+
+        -- Drop Temp Table
+        DROP TABLE temp_users;
+    `)
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
+
+func downUserTimezone(ctx context.Context, tx *sql.Tx) error {
+    // Update column name & value
+    _, err := tx.Exec(`
+        ALTER TABLE users RENAME COLUMN timezone TO time_offset;
+        UPDATE users SET time_offset = '0 hours';
+    `)
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
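upUserTimezone pulls an "isNew" flag out of the goose context, so whatever calls goose.UpContext has to put that value on the context first. The diff only shows the UpContext call itself; the sketch below is an assumption about that wiring, not code from this commit, and the function name and plain string key are illustrative only.

package database

import (
	"context"
	"database/sql"

	"github.com/pressly/goose/v3"
)

// runMigrations sketches how the "isNew" flag consumed by upUserTimezone
// could be placed on the context handed to goose. The migration looks the
// key up as the plain string "isNew" (go vet would normally prefer a
// dedicated key type).
func runMigrations(db *sql.DB, isNew bool) error {
	ctx := context.WithValue(context.Background(), "isNew", isNew)
	return goose.UpContext(ctx, db, "migrations")
}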
@@ -103,7 +103,7 @@ type User struct {
     Pass       *string `json:"-"`
     AuthHash   *string `json:"auth_hash"`
     Admin      bool    `json:"-"`
-    TimeOffset *string `json:"time_offset"`
+    Timezone   *string `json:"timezone"`
     CreatedAt  string  `json:"created_at"`
 }
 
@@ -117,34 +117,3 @@ type UserStreak struct {
     CurrentStreakStartDate string `json:"current_streak_start_date"`
     CurrentStreakEndDate   string `json:"current_streak_end_date"`
 }
-
-type ViewDocumentUserStatistic struct {
-    DocumentID         string      `json:"document_id"`
-    UserID             string      `json:"user_id"`
-    Percentage         float64     `json:"percentage"`
-    LastRead           interface{} `json:"last_read"`
-    ReadPercentage     *float64    `json:"read_percentage"`
-    TotalTimeSeconds   *float64    `json:"total_time_seconds"`
-    TotalWordsRead     interface{} `json:"total_words_read"`
-    TotalWpm           int64       `json:"total_wpm"`
-    YearlyTimeSeconds  *float64    `json:"yearly_time_seconds"`
-    YearlyWordsRead    interface{} `json:"yearly_words_read"`
-    YearlyWpm          interface{} `json:"yearly_wpm"`
-    MonthlyTimeSeconds *float64    `json:"monthly_time_seconds"`
-    MonthlyWordsRead   interface{} `json:"monthly_words_read"`
-    MonthlyWpm         interface{} `json:"monthly_wpm"`
-    WeeklyTimeSeconds  *float64    `json:"weekly_time_seconds"`
-    WeeklyWordsRead    interface{} `json:"weekly_words_read"`
-    WeeklyWpm          interface{} `json:"weekly_wpm"`
-}
-
-type ViewUserStreak struct {
-    UserID                 string      `json:"user_id"`
-    Window                 string      `json:"window"`
-    MaxStreak              interface{} `json:"max_streak"`
-    MaxStreakStartDate     interface{} `json:"max_streak_start_date"`
-    MaxStreakEndDate       interface{} `json:"max_streak_end_date"`
-    CurrentStreak          interface{} `json:"current_streak"`
-    CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
-    CurrentStreakEndDate   interface{} `json:"current_streak_end_date"`
-}
@@ -64,7 +64,7 @@ WITH filtered_activity AS (
 SELECT
     document_id,
     device_id,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(activity.start_time, users.timezone)) AS TEXT) AS start_time,
     title,
     author,
     duration,
@@ -77,7 +77,7 @@ LEFT JOIN users ON users.id = activity.user_id;
 
 -- name: GetDailyReadStats :many
 WITH RECURSIVE last_30_days AS (
-    SELECT DATE('now', time_offset) AS date
+    SELECT DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) AS date
     FROM users WHERE users.id = $user_id
     UNION ALL
     SELECT DATE(date, '-1 days')
@@ -96,7 +96,7 @@ filtered_activity AS (
 activity_days AS (
     SELECT
         SUM(duration) AS seconds_read,
-        DATE(start_time, time_offset) AS day
+        DATE(LOCAL_TIME(start_time, timezone)) AS day
     FROM filtered_activity AS activity
     LEFT JOIN users ON users.id = activity.user_id
     GROUP BY day
@@ -135,8 +135,8 @@ WHERE id = $device_id LIMIT 1;
 SELECT
     devices.id,
     devices.device_name,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.created_at, users.timezone)) AS TEXT) AS created_at,
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.last_synced, users.timezone)) AS TEXT) AS last_synced
 FROM devices
 JOIN users ON users.id = devices.user_id
 WHERE users.id = $user_id
@@ -174,7 +174,7 @@ SELECT
     CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
     COALESCE(dus.read_percentage, 0) AS read_percentage,
     COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), LOCAL_TIME(users.timezone))
         AS last_read,
     ROUND(CAST(CASE
         WHEN dus.percentage IS NULL THEN 0.0
@@ -226,7 +226,7 @@ SELECT
     CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
     COALESCE(dus.read_percentage, 0) AS read_percentage,
     COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+    STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, "1970-01-01"), users.timezone))
        AS last_read,
     ROUND(CAST(CASE
         WHEN dus.percentage IS NULL THEN 0.0
@@ -280,7 +280,7 @@ SELECT
     ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
     progress.document_id,
     progress.user_id,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, users.time_offset) AS TEXT) AS created_at
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, LOCAL_TIME(users.timezone)) AS TEXT) AS created_at
 FROM document_progress AS progress
 LEFT JOIN users ON progress.user_id = users.id
 LEFT JOIN devices ON progress.device_id = devices.id
@@ -369,7 +369,7 @@ UPDATE users
 SET
     pass = COALESCE($password, pass),
     auth_hash = COALESCE($auth_hash, auth_hash),
-    time_offset = COALESCE($time_offset, time_offset),
+    timezone = COALESCE($timezone, timezone),
     admin = COALESCE($admin, admin)
 WHERE id = $user_id
 RETURNING *;
@@ -181,7 +181,7 @@ WITH filtered_activity AS (
 SELECT
     document_id,
     device_id,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(activity.start_time, users.timezone)) AS TEXT) AS start_time,
     title,
     author,
     duration,
@@ -254,7 +254,7 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Get
 
 const getDailyReadStats = `-- name: GetDailyReadStats :many
 WITH RECURSIVE last_30_days AS (
-    SELECT DATE('now', time_offset) AS date
+    SELECT DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) AS date
     FROM users WHERE users.id = ?1
     UNION ALL
     SELECT DATE(date, '-1 days')
@@ -273,7 +273,7 @@ filtered_activity AS (
 activity_days AS (
     SELECT
         SUM(duration) AS seconds_read,
-        DATE(start_time, time_offset) AS day
+        DATE(LOCAL_TIME(start_time, timezone)) AS day
     FROM filtered_activity AS activity
     LEFT JOIN users ON users.id = activity.user_id
     GROUP BY day
@@ -410,8 +410,8 @@ const getDevices = `-- name: GetDevices :many
 SELECT
     devices.id,
     devices.device_name,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.created_at, users.timezone)) AS TEXT) AS created_at,
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(devices.last_synced, users.timezone)) AS TEXT) AS last_synced
 FROM devices
 JOIN users ON users.id = devices.user_id
 WHERE users.id = ?1
@@ -544,7 +544,7 @@ SELECT
     CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
     COALESCE(dus.read_percentage, 0) AS read_percentage,
     COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), LOCAL_TIME(users.timezone))
        AS last_read,
     ROUND(CAST(CASE
         WHEN dus.percentage IS NULL THEN 0.0
@@ -698,7 +698,9 @@ SELECT
     CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
     COALESCE(dus.read_percentage, 0) AS read_percentage,
     COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+
+    -- LOCAL_TIME(STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01")), users.timezone)
+    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"))
        AS last_read,
     ROUND(CAST(CASE
         WHEN dus.percentage IS NULL THEN 0.0
@@ -887,7 +889,7 @@ SELECT
     ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
     progress.document_id,
     progress.user_id,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, users.time_offset) AS TEXT) AS created_at
+    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', progress.created_at, LOCAL_TIME(users.timezone)) AS TEXT) AS created_at
 FROM document_progress AS progress
 LEFT JOIN users ON progress.user_id = users.id
 LEFT JOIN devices ON progress.device_id = devices.id
@@ -961,7 +963,7 @@ func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) ([]Get
 }
 
 const getUser = `-- name: GetUser :one
-SELECT id, pass, auth_hash, admin, time_offset, created_at FROM users
+SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
 WHERE id = ?1 LIMIT 1
 `
 
@@ -973,7 +975,7 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
         &i.Pass,
         &i.AuthHash,
         &i.Admin,
-        &i.TimeOffset,
+        &i.Timezone,
         &i.CreatedAt,
     )
     return i, err
@@ -1100,7 +1102,7 @@ func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStre
 }
 
 const getUsers = `-- name: GetUsers :many
-SELECT id, pass, auth_hash, admin, time_offset, created_at FROM users
+SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
 `
 
 func (q *Queries) GetUsers(ctx context.Context) ([]User, error) {
@@ -1117,7 +1119,7 @@ func (q *Queries) GetUsers(ctx context.Context) ([]User, error) {
             &i.Pass,
             &i.AuthHash,
             &i.Admin,
-            &i.TimeOffset,
+            &i.Timezone,
             &i.CreatedAt,
         ); err != nil {
             return nil, err
@@ -1251,16 +1253,16 @@ UPDATE users
 SET
     pass = COALESCE(?1, pass),
     auth_hash = COALESCE(?2, auth_hash),
-    time_offset = COALESCE(?3, time_offset),
+    timezone = COALESCE(?3, timezone),
    admin = COALESCE(?4, admin)
 WHERE id = ?5
-RETURNING id, pass, auth_hash, admin, time_offset, created_at
+RETURNING id, pass, auth_hash, admin, timezone, created_at
 `
 
 type UpdateUserParams struct {
     Password   *string `json:"-"`
     AuthHash   *string `json:"auth_hash"`
-    TimeOffset *string `json:"time_offset"`
+    Timezone   *string `json:"timezone"`
     Admin      bool    `json:"-"`
     UserID     string  `json:"user_id"`
 }
@@ -1269,7 +1271,7 @@ func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, e
     row := q.db.QueryRowContext(ctx, updateUser,
         arg.Password,
         arg.AuthHash,
-        arg.TimeOffset,
+        arg.Timezone,
         arg.Admin,
         arg.UserID,
     )
@@ -1279,7 +1281,7 @@ func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, e
         &i.Pass,
         &i.AuthHash,
         &i.Admin,
-        &i.TimeOffset,
+        &i.Timezone,
         &i.CreatedAt,
     )
     return i, err
@@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS users (
     pass TEXT NOT NULL,
     auth_hash TEXT NOT NULL,
     admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)),
-    time_offset TEXT NOT NULL DEFAULT '0 hours',
+    timezone TEXT NOT NULL DEFAULT 'Europe/London',
 
     created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
 );
@@ -174,309 +174,9 @@ CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
     document_id
 );
 
 
----------------------------------------------------------------
----------------------------- Views ----------------------------
----------------------------------------------------------------
-
 DROP VIEW IF EXISTS view_user_streaks;
 DROP VIEW IF EXISTS view_document_user_statistics;
 
---------------------------------
---------- User Streaks ---------
---------------------------------
-
-CREATE VIEW view_user_streaks AS
-
-WITH document_windows AS (
-    SELECT
-        activity.user_id,
-        users.time_offset,
-        DATE(
-            activity.start_time,
-            users.time_offset,
-            'weekday 0', '-7 day'
-        ) AS weekly_read,
-        DATE(activity.start_time, users.time_offset) AS daily_read
-    FROM activity
-    LEFT JOIN users ON users.id = activity.user_id
-    GROUP BY activity.user_id, weekly_read, daily_read
-),
-weekly_partitions AS (
-    SELECT
-        user_id,
-        time_offset,
-        'WEEK' AS "window",
-        weekly_read AS read_window,
-        row_number() OVER (
-            PARTITION BY user_id ORDER BY weekly_read DESC
-        ) AS seqnum
-    FROM document_windows
-    GROUP BY user_id, weekly_read
-),
-daily_partitions AS (
-    SELECT
-        user_id,
-        time_offset,
-        'DAY' AS "window",
-        daily_read AS read_window,
-        row_number() OVER (
-            PARTITION BY user_id ORDER BY daily_read DESC
-        ) AS seqnum
-    FROM document_windows
-    GROUP BY user_id, daily_read
-),
-streaks AS (
-    SELECT
-        COUNT(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        window,
-        user_id,
-        time_offset
-    FROM daily_partitions
-    GROUP BY
-        time_offset,
-        user_id,
-        DATE(read_window, '+' || seqnum || ' day')
-
-    UNION ALL
-
-    SELECT
-        COUNT(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        window,
-        user_id,
-        time_offset
-    FROM weekly_partitions
-    GROUP BY
-        time_offset,
-        user_id,
-        DATE(read_window, '+' || (seqnum * 7) || ' day')
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date,
-        window,
-        user_id
-    FROM streaks
-    GROUP BY user_id, window
-),
-current_streak AS (
-    SELECT
-        streak AS current_streak,
-        start_date AS current_streak_start_date,
-        end_date AS current_streak_end_date,
-        window,
-        user_id
-    FROM streaks
-    WHERE CASE
-        WHEN window = "WEEK" THEN
-            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
-            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
-        WHEN window = "DAY" THEN
-            DATE('now', time_offset, '-1 day') = current_streak_end_date
-            OR DATE('now', time_offset) = current_streak_end_date
-    END
-    GROUP BY user_id, window
-)
-SELECT
-    max_streak.user_id,
-    max_streak.window,
-    IFNULL(max_streak, 0) AS max_streak,
-    IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
-    IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
-    IFNULL(current_streak, 0) AS current_streak,
-    IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
-    IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
-FROM max_streak
-LEFT JOIN current_streak ON
-    current_streak.user_id = max_streak.user_id
-    AND current_streak.window = max_streak.window;
-
---------------------------------
-------- Document Stats ---------
---------------------------------
-
-CREATE VIEW view_document_user_statistics AS
-
-WITH intermediate_ga AS (
-    SELECT
-        ga1.id AS row_id,
-        ga1.user_id,
-        ga1.document_id,
-        ga1.duration,
-        ga1.start_time,
-        ga1.start_percentage,
-        ga1.end_percentage,
-
-        -- Find Overlapping Events (Assign Unique ID)
-        (
-            SELECT MIN(id)
-            FROM activity AS ga2
-            WHERE
-                ga1.document_id = ga2.document_id
-                AND ga1.user_id = ga2.user_id
-                AND ga1.start_percentage <= ga2.end_percentage
-                AND ga1.end_percentage >= ga2.start_percentage
-        ) AS group_leader
-    FROM activity AS ga1
-),
-
-grouped_activity AS (
-    SELECT
-        user_id,
-        document_id,
-        MAX(start_time) AS start_time,
-        MIN(start_percentage) AS start_percentage,
-        MAX(end_percentage) AS end_percentage,
-        MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
-        SUM(duration) AS duration
-    FROM intermediate_ga
-    GROUP BY group_leader
-),
-
-current_progress AS (
-    SELECT
-        user_id,
-        document_id,
-        COALESCE((
-            SELECT percentage
-            FROM document_progress AS dp
-            WHERE
-                dp.user_id = iga.user_id
-                AND dp.document_id = iga.document_id
-            ORDER BY created_at DESC
-            LIMIT 1
-        ), end_percentage) AS percentage
-    FROM intermediate_ga AS iga
-    GROUP BY user_id, document_id
-    HAVING MAX(start_time)
-)
-
-SELECT
-    ga.document_id,
-    ga.user_id,
-    cp.percentage,
-    MAX(start_time) AS last_read,
-    SUM(read_percentage) AS read_percentage,
-
-    -- All Time WPM
-    SUM(duration) AS total_time_seconds,
-    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
-        AS total_words_read,
-    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
-        / (SUM(duration) / 60.0) AS total_wpm,
-
-    -- Yearly WPM
-    SUM(CASE WHEN start_time >= DATE('now', '-1 year') THEN duration ELSE 0 END)
-        AS yearly_time_seconds,
-    (
-        CAST(COALESCE(d.words, 0.0) AS REAL)
-        * SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
-                ELSE 0
-            END
-        )
-    )
-        AS yearly_words_read,
-    COALESCE((
-        CAST(COALESCE(d.words, 0.0) AS REAL)
-        * SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
-            END
-        )
-    )
-    / (
-        SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-1 year') THEN duration
-            END
-        )
-        / 60.0
-    ), 0.0)
-        AS yearly_wpm,
-
-    -- Monthly WPM
-    SUM(
-        CASE WHEN start_time >= DATE('now', '-1 month') THEN duration ELSE 0 END
-    )
-        AS monthly_time_seconds,
-    (
-        CAST(COALESCE(d.words, 0.0) AS REAL)
-        * SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
-                ELSE 0
-            END
-        )
-    )
-        AS monthly_words_read,
-    COALESCE((
-        CAST(COALESCE(d.words, 0.0) AS REAL)
-        * SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
-            END
-        )
-    )
-    / (
-        SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-1 month') THEN duration
-            END
-        )
-        / 60.0
-    ), 0.0)
-        AS monthly_wpm,
-
-    -- Weekly WPM
-    SUM(CASE WHEN start_time >= DATE('now', '-7 days') THEN duration ELSE 0 END)
-        AS weekly_time_seconds,
-    (
-        CAST(COALESCE(d.words, 0.0) AS REAL)
-        * SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
-                ELSE 0
-            END
-        )
-    )
-        AS weekly_words_read,
-    COALESCE((
-        CAST(COALESCE(d.words, 0.0) AS REAL)
-        * SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
-            END
-        )
-    )
-    / (
-        SUM(
-            CASE
-                WHEN start_time >= DATE('now', '-7 days') THEN duration
-            END
-        )
-        / 60.0
-    ), 0.0)
-        AS weekly_wpm
-
-FROM grouped_activity AS ga
-INNER JOIN
-    current_progress AS cp
-    ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
-INNER JOIN
-    documents AS d
-    ON ga.document_id = d.id
-GROUP BY ga.document_id, ga.user_id
-ORDER BY total_wpm DESC;
-
-
 ---------------------------------------------------------------
 --------------------------- Triggers --------------------------
 ---------------------------------------------------------------
database/views.sql (new file, 298)

@@ -0,0 +1,298 @@
+---------------------------------------------------------------
+---------------------------- Views ----------------------------
+---------------------------------------------------------------
+
+--------------------------------
+--------- User Streaks ---------
+--------------------------------
+
+CREATE VIEW view_user_streaks AS
+
+WITH document_windows AS (
+    SELECT
+        activity.user_id,
+        users.timezone,
+        DATE(
+            LOCAL_TIME(activity.start_time, users.timezone),
+            'weekday 0', '-7 day'
+        ) AS weekly_read,
+        DATE(LOCAL_TIME(activity.start_time, users.timezone)) AS daily_read
+    FROM activity
+    LEFT JOIN users ON users.id = activity.user_id
+    GROUP BY activity.user_id, weekly_read, daily_read
+),
+weekly_partitions AS (
+    SELECT
+        user_id,
+        timezone,
+        'WEEK' AS "window",
+        weekly_read AS read_window,
+        row_number() OVER (
+            PARTITION BY user_id ORDER BY weekly_read DESC
+        ) AS seqnum
+    FROM document_windows
+    GROUP BY user_id, weekly_read
+),
+daily_partitions AS (
+    SELECT
+        user_id,
+        timezone,
+        'DAY' AS "window",
+        daily_read AS read_window,
+        row_number() OVER (
+            PARTITION BY user_id ORDER BY daily_read DESC
+        ) AS seqnum
+    FROM document_windows
+    GROUP BY user_id, daily_read
+),
+streaks AS (
+    SELECT
+        COUNT(*) AS streak,
+        MIN(read_window) AS start_date,
+        MAX(read_window) AS end_date,
+        window,
+        user_id,
+        timezone
+    FROM daily_partitions
+    GROUP BY
+        timezone,
+        user_id,
+        DATE(read_window, '+' || seqnum || ' day')
+
+    UNION ALL
+
+    SELECT
+        COUNT(*) AS streak,
+        MIN(read_window) AS start_date,
+        MAX(read_window) AS end_date,
+        window,
+        user_id,
+        timezone
+    FROM weekly_partitions
+    GROUP BY
+        timezone,
+        user_id,
+        DATE(read_window, '+' || (seqnum * 7) || ' day')
+),
+max_streak AS (
+    SELECT
+        MAX(streak) AS max_streak,
+        start_date AS max_streak_start_date,
+        end_date AS max_streak_end_date,
+        window,
+        user_id
+    FROM streaks
+    GROUP BY user_id, window
+),
+current_streak AS (
+    SELECT
+        streak AS current_streak,
+        start_date AS current_streak_start_date,
+        end_date AS current_streak_end_date,
+        window,
+        user_id
+    FROM streaks
+    WHERE CASE
+        WHEN window = "WEEK" THEN
+            DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-14 day') = current_streak_end_date
+            OR DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-7 day') = current_streak_end_date
+        WHEN window = "DAY" THEN
+            DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), '-1 day') = current_streak_end_date
+            OR DATE(LOCAL_TIME(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) = current_streak_end_date
+    END
+    GROUP BY user_id, window
+)
+SELECT
+    max_streak.user_id,
+    max_streak.window,
+    IFNULL(max_streak, 0) AS max_streak,
+    IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
+    IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
+    IFNULL(current_streak, 0) AS current_streak,
+    IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
+    IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
+FROM max_streak
+LEFT JOIN current_streak ON
+    current_streak.user_id = max_streak.user_id
+    AND current_streak.window = max_streak.window;
+
+--------------------------------
+------- Document Stats ---------
+--------------------------------
+
+CREATE VIEW view_document_user_statistics AS
+
+WITH intermediate_ga AS (
+    SELECT
+        ga1.id AS row_id,
+        ga1.user_id,
+        ga1.document_id,
+        ga1.duration,
+        ga1.start_time,
+        ga1.start_percentage,
+        ga1.end_percentage,
+
+        -- Find Overlapping Events (Assign Unique ID)
+        (
+            SELECT MIN(id)
+            FROM activity AS ga2
+            WHERE
+                ga1.document_id = ga2.document_id
+                AND ga1.user_id = ga2.user_id
+                AND ga1.start_percentage <= ga2.end_percentage
+                AND ga1.end_percentage >= ga2.start_percentage
+        ) AS group_leader
+    FROM activity AS ga1
+),
+
+grouped_activity AS (
+    SELECT
+        user_id,
+        document_id,
+        MAX(start_time) AS start_time,
+        MIN(start_percentage) AS start_percentage,
+        MAX(end_percentage) AS end_percentage,
+        MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
+        SUM(duration) AS duration
+    FROM intermediate_ga
+    GROUP BY group_leader
+),
+
+current_progress AS (
+    SELECT
+        user_id,
+        document_id,
+        COALESCE((
+            SELECT percentage
+            FROM document_progress AS dp
+            WHERE
+                dp.user_id = iga.user_id
+                AND dp.document_id = iga.document_id
+            ORDER BY created_at DESC
+            LIMIT 1
+        ), end_percentage) AS percentage
+    FROM intermediate_ga AS iga
+    GROUP BY user_id, document_id
+    HAVING MAX(start_time)
+)
+
+SELECT
+    ga.document_id,
+    ga.user_id,
+    cp.percentage,
+    MAX(start_time) AS last_read,
+    SUM(read_percentage) AS read_percentage,
+
+    -- All Time WPM
+    SUM(duration) AS total_time_seconds,
+    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
+        AS total_words_read,
+    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
+        / (SUM(duration) / 60.0) AS total_wpm,
+
+    -- Yearly WPM
+    SUM(CASE WHEN start_time >= DATE('now', '-1 year') THEN duration ELSE 0 END)
+        AS yearly_time_seconds,
+    (
+        CAST(COALESCE(d.words, 0.0) AS REAL)
+        * SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
+                ELSE 0
+            END
+        )
+    )
+        AS yearly_words_read,
+    COALESCE((
+        CAST(COALESCE(d.words, 0.0) AS REAL)
+        * SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-1 year') THEN read_percentage
+            END
+        )
+    )
+    / (
+        SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-1 year') THEN duration
+            END
+        )
+        / 60.0
+    ), 0.0)
+        AS yearly_wpm,
+
+    -- Monthly WPM
+    SUM(
+        CASE WHEN start_time >= DATE('now', '-1 month') THEN duration ELSE 0 END
+    )
+        AS monthly_time_seconds,
+    (
+        CAST(COALESCE(d.words, 0.0) AS REAL)
+        * SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
+                ELSE 0
+            END
+        )
+    )
+        AS monthly_words_read,
+    COALESCE((
+        CAST(COALESCE(d.words, 0.0) AS REAL)
+        * SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-1 month') THEN read_percentage
+            END
+        )
+    )
+    / (
+        SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-1 month') THEN duration
+            END
+        )
+        / 60.0
+    ), 0.0)
+        AS monthly_wpm,
+
+    -- Weekly WPM
+    SUM(CASE WHEN start_time >= DATE('now', '-7 days') THEN duration ELSE 0 END)
+        AS weekly_time_seconds,
+    (
+        CAST(COALESCE(d.words, 0.0) AS REAL)
+        * SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
+                ELSE 0
+            END
+        )
+    )
+        AS weekly_words_read,
+    COALESCE((
+        CAST(COALESCE(d.words, 0.0) AS REAL)
+        * SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-7 days') THEN read_percentage
+            END
+        )
+    )
+    / (
+        SUM(
+            CASE
+                WHEN start_time >= DATE('now', '-7 days') THEN duration
+            END
+        )
+        / 60.0
+    ), 0.0)
+        AS weekly_wpm
+
+FROM grouped_activity AS ga
+INNER JOIN
+    current_progress AS cp
+    ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
+INNER JOIN
+    documents AS d
+    ON ga.document_id = d.id
+GROUP BY ga.document_id, ga.user_id
+ORDER BY total_wpm DESC;
@@ -120,7 +120,7 @@ sql:
       go_type:
         type: "string"
        pointer: true
-    - column: "users.time_offset"
+    - column: "users.timezone"
       go_type:
         type: "string"
         pointer: true
@@ -9,8 +9,13 @@
        action="./users"
        class="flex flex-col gap-2 text-black dark:text-white text-sm">
   <input type="text"
-         id="username"
-         name="username"
+         id="operation"
+         name="operation"
+         value="CREATE"
+         class="hidden" />
+  <input type="text"
+         id="user"
+         name="user"
          placeholder="User"
          class="p-2 bg-gray-300 text-black dark:bg-gray-700 dark:text-white" />
   <input type="password"
@@ -51,7 +51,7 @@
 {{ end }}
 </div>
 <div class="flex flex-col grow gap-2 p-4 rounded shadow-lg bg-white dark:bg-gray-700 text-gray-500 dark:text-white">
-  <p class="text-lg font-semibold mb-2">Change Time Offset</p>
+  <p class="text-lg font-semibold mb-2">Change Timezone</p>
   <form class="flex gap-4 flex-col lg:flex-row"
         action="./settings"
         method="POST">
@@ -60,11 +60,11 @@
       {{ template "svg/clock" (dict "Size" 15) }}
     </span>
     <select class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
-            id="time_offset"
-            name="time_offset">
-      {{ range $item := getUTCOffsets }}
-      <option {{ if (eq $item.Value $.Data.TimeOffset) }}selected{{ end }} value="{{ $item.Value }}">
-        {{ $item.Name }}
+            id="timezone"
+            name="timezone">
+      {{ range $item := getTimeZones }}
+      <option {{ if (eq $item $.Data.Timezone) }}selected{{ end }} value="{{ $item }}">
+        {{ $item }}
       </option>
       {{ end }}
     </select>