// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.21.0
// source: query.sql

package database

import (
	"context"
	"database/sql"
	"strings"
	"time"
)
|
|
|
|
|
|
|
|
const addActivity = `-- name: AddActivity :one
INSERT INTO raw_activity (
    user_id,
    document_id,
    device_id,
    start_time,
    duration,
    page,
    pages
)
VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at
`

// AddActivityParams holds the column values inserted by AddActivity,
// in the same order as the VALUES placeholders above.
type AddActivityParams struct {
	UserID     string    `json:"user_id"`
	DocumentID string    `json:"document_id"`
	DeviceID   string    `json:"device_id"`
	StartTime  time.Time `json:"start_time"`
	Duration   int64     `json:"duration"`
	Page       int64     `json:"page"`
	Pages      int64     `json:"pages"`
}

// AddActivity inserts one row into raw_activity and returns the inserted
// record via the query's RETURNING clause.
func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) {
	row := q.db.QueryRowContext(ctx, addActivity,
		arg.UserID,
		arg.DocumentID,
		arg.DeviceID,
		arg.StartTime,
		arg.Duration,
		arg.Page,
		arg.Pages,
	)
	var i RawActivity
	// Scan order follows the RETURNING clause, where duration comes after
	// pages — it intentionally differs from the INSERT column order.
	err := row.Scan(
		&i.ID,
		&i.UserID,
		&i.DocumentID,
		&i.DeviceID,
		&i.StartTime,
		&i.Page,
		&i.Pages,
		&i.Duration,
		&i.CreatedAt,
	)
	return i, err
}
|
|
|
|
|
2023-09-23 18:14:57 +00:00
|
|
|
const addMetadata = `-- name: AddMetadata :one
INSERT INTO metadata (
    document_id,
    title,
    author,
    description,
    gbid,
    olid,
    isbn10,
    isbn13
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
RETURNING id, document_id, title, author, description, gbid, olid, isbn10, isbn13, created_at
`

// AddMetadataParams holds the column values inserted by AddMetadata.
// Pointer fields map to nullable columns (a nil pointer inserts NULL).
type AddMetadataParams struct {
	DocumentID  string  `json:"document_id"`
	Title       *string `json:"title"`
	Author      *string `json:"author"`
	Description *string `json:"description"`
	Gbid        *string `json:"gbid"`
	Olid        *string `json:"olid"`
	Isbn10      *string `json:"isbn10"`
	Isbn13      *string `json:"isbn13"`
}

// AddMetadata inserts one row into the metadata table and returns the
// inserted record via the query's RETURNING clause.
func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metadatum, error) {
	row := q.db.QueryRowContext(ctx, addMetadata,
		arg.DocumentID,
		arg.Title,
		arg.Author,
		arg.Description,
		arg.Gbid,
		arg.Olid,
		arg.Isbn10,
		arg.Isbn13,
	)
	var i Metadatum
	err := row.Scan(
		&i.ID,
		&i.DocumentID,
		&i.Title,
		&i.Author,
		&i.Description,
		&i.Gbid,
		&i.Olid,
		&i.Isbn10,
		&i.Isbn13,
		&i.CreatedAt,
	)
	return i, err
}
|
|
|
|
|
2023-09-18 23:57:18 +00:00
|
|
|
const createUser = `-- name: CreateUser :execrows
INSERT INTO users (id, pass)
VALUES (?, ?)
ON CONFLICT DO NOTHING
`

// CreateUserParams holds the values inserted by CreateUser.
// Pass is excluded from JSON output (tag "-") because it is a credential.
type CreateUserParams struct {
	ID   string  `json:"id"`
	Pass *string `json:"-"`
}

// CreateUser inserts a user row and reports the number of rows affected.
// Because of ON CONFLICT DO NOTHING, it returns 0 (with a nil error) when
// the user already exists.
func (q *Queries) CreateUser(ctx context.Context, arg CreateUserParams) (int64, error) {
	result, err := q.db.ExecContext(ctx, createUser, arg.ID, arg.Pass)
	if err != nil {
		return 0, err
	}
	return result.RowsAffected()
}
|
|
|
|
|
|
|
|
const deleteDocument = `-- name: DeleteDocument :execrows
UPDATE documents
SET
    deleted = 1
WHERE id = ?1
`

// DeleteDocument soft-deletes a document by setting its deleted flag to 1
// (the row is not removed). It returns the number of rows affected.
func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error) {
	result, err := q.db.ExecContext(ctx, deleteDocument, id)
	if err != nil {
		return 0, err
	}
	return result.RowsAffected()
}
|
|
|
|
|
|
|
|
const getActivity = `-- name: GetActivity :many
SELECT
    document_id,
    CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
    title,
    author,
    duration,
    page,
    pages
FROM activity
LEFT JOIN documents ON documents.id = activity.document_id
LEFT JOIN users ON users.id = activity.user_id
WHERE
    activity.user_id = ?1
    AND (
        (
            CAST(?2 AS BOOLEAN) = TRUE
            AND document_id = ?3
        ) OR ?2 = FALSE
    )
ORDER BY activity.start_time DESC
LIMIT ?5
OFFSET ?4
`

// GetActivityParams holds the bind parameters for GetActivity.
// When DocFilter is true the results are restricted to DocumentID;
// when false, DocumentID is ignored (see the OR ?2 = FALSE branch).
type GetActivityParams struct {
	UserID     string `json:"user_id"`
	DocFilter  bool   `json:"doc_filter"`
	DocumentID string `json:"document_id"`
	Offset     int64  `json:"offset"`
	Limit      int64  `json:"limit"`
}

// GetActivityRow is one result row of GetActivity. StartTime is a TEXT
// timestamp already shifted by the user's time_offset in SQL.
type GetActivityRow struct {
	DocumentID string  `json:"document_id"`
	StartTime  string  `json:"start_time"`
	Title      *string `json:"title"`
	Author     *string `json:"author"`
	Duration   int64   `json:"duration"`
	Page       int64   `json:"page"`
	Pages      int64   `json:"pages"`
}

// GetActivity returns a page of a user's reading activity, newest first,
// optionally filtered to a single document.
func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]GetActivityRow, error) {
	rows, err := q.db.QueryContext(ctx, getActivity,
		arg.UserID,
		arg.DocFilter,
		arg.DocumentID,
		arg.Offset,
		arg.Limit,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetActivityRow
	for rows.Next() {
		var i GetActivityRow
		if err := rows.Scan(
			&i.DocumentID,
			&i.StartTime,
			&i.Title,
			&i.Author,
			&i.Duration,
			&i.Page,
			&i.Pages,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
const getDailyReadStats = `-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days AS (
    SELECT DATE('now', time_offset) AS date
    FROM users WHERE users.id = ?1
    UNION ALL
    SELECT DATE(date, '-1 days')
    FROM last_30_days
    LIMIT 30
),
activity_records AS (
    SELECT
        SUM(duration) AS seconds_read,
        DATE(start_time, time_offset) AS day
    FROM activity
    LEFT JOIN users ON users.id = activity.user_id
    WHERE user_id = ?1
    AND start_time > DATE('now', '-31 days')
    GROUP BY day
    ORDER BY day DESC
    LIMIT 30
)
SELECT
    CAST(date AS TEXT),
    CAST(CASE
        WHEN seconds_read IS NULL THEN 0
        ELSE seconds_read / 60
    END AS INTEGER) AS minutes_read
FROM last_30_days
LEFT JOIN activity_records ON activity_records.day == last_30_days.date
ORDER BY date DESC
LIMIT 30
`

// GetDailyReadStatsRow is one day of the 30-day summary: the day's date
// (TEXT, user-local per time_offset) and minutes read (0 for idle days).
type GetDailyReadStatsRow struct {
	Date        string `json:"date"`
	MinutesRead int64  `json:"minutes_read"`
}

// GetDailyReadStats returns per-day minutes read for the last 30 days,
// newest first. The recursive CTE generates the date range so days with
// no activity still appear with 0 minutes.
func (q *Queries) GetDailyReadStats(ctx context.Context, userID string) ([]GetDailyReadStatsRow, error) {
	rows, err := q.db.QueryContext(ctx, getDailyReadStats, userID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetDailyReadStatsRow
	for rows.Next() {
		var i GetDailyReadStatsRow
		if err := rows.Scan(&i.Date, &i.MinutesRead); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
const getDatabaseInfo = `-- name: GetDatabaseInfo :one
SELECT
    (SELECT COUNT(rowid) FROM activity WHERE activity.user_id = ?1) AS activity_size,
    (SELECT COUNT(rowid) FROM documents) AS documents_size,
    (SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = ?1) AS progress_size,
    (SELECT COUNT(rowid) FROM devices WHERE devices.user_id = ?1) AS devices_size
LIMIT 1
`

// GetDatabaseInfoRow carries row counts for the main tables. All counts
// except documents_size are scoped to the requesting user.
type GetDatabaseInfoRow struct {
	ActivitySize  int64 `json:"activity_size"`
	DocumentsSize int64 `json:"documents_size"`
	ProgressSize  int64 `json:"progress_size"`
	DevicesSize   int64 `json:"devices_size"`
}

// GetDatabaseInfo returns table row counts for a user's activity,
// progress and devices, plus the global document count.
func (q *Queries) GetDatabaseInfo(ctx context.Context, userID string) (GetDatabaseInfoRow, error) {
	row := q.db.QueryRowContext(ctx, getDatabaseInfo, userID)
	var i GetDatabaseInfoRow
	err := row.Scan(
		&i.ActivitySize,
		&i.DocumentsSize,
		&i.ProgressSize,
		&i.DevicesSize,
	)
	return i, err
}
|
|
|
|
|
|
|
|
const getDeletedDocuments = `-- name: GetDeletedDocuments :many
SELECT documents.id
FROM documents
WHERE
    documents.deleted = true
    AND documents.id IN (/*SLICE:document_ids*/?)
`

// GetDeletedDocuments returns the IDs from documentIds that refer to
// soft-deleted documents. The /*SLICE:...*/ marker is expanded at runtime
// into one placeholder per element; with an empty slice it becomes
// IN (NULL), which matches nothing.
func (q *Queries) GetDeletedDocuments(ctx context.Context, documentIds []string) ([]string, error) {
	query := getDeletedDocuments
	var queryParams []interface{}
	if len(documentIds) > 0 {
		for _, v := range documentIds {
			queryParams = append(queryParams, v)
		}
		// Replace the slice marker with ?,?,... (one per element); the
		// leading comma from Repeat is trimmed by the [1:] slice.
		query = strings.Replace(query, "/*SLICE:document_ids*/?", strings.Repeat(",?", len(documentIds))[1:], 1)
	} else {
		query = strings.Replace(query, "/*SLICE:document_ids*/?", "NULL", 1)
	}
	rows, err := q.db.QueryContext(ctx, query, queryParams...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []string
	for rows.Next() {
		var id string
		if err := rows.Scan(&id); err != nil {
			return nil, err
		}
		items = append(items, id)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
const getDevice = `-- name: GetDevice :one
SELECT id, user_id, device_name, created_at, sync FROM devices
WHERE id = ?1 LIMIT 1
`

// GetDevice fetches a single device row by its ID.
func (q *Queries) GetDevice(ctx context.Context, deviceID string) (Device, error) {
	row := q.db.QueryRowContext(ctx, getDevice, deviceID)
	var i Device
	err := row.Scan(
		&i.ID,
		&i.UserID,
		&i.DeviceName,
		&i.CreatedAt,
		&i.Sync,
	)
	return i, err
}
|
|
|
|
|
|
|
|
const getDevices = `-- name: GetDevices :many
SELECT
    devices.device_name,
    CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at,
    CAST(DATETIME(MAX(activity.created_at), users.time_offset) AS TEXT) AS last_sync
FROM activity
JOIN devices ON devices.id = activity.device_id
JOIN users ON users.id = ?1
WHERE devices.user_id = ?1
GROUP BY activity.device_id
`

// GetDevicesRow is one device of a user, with timestamps rendered as TEXT
// in the user's local offset; last_sync is the newest activity.created_at
// for that device.
type GetDevicesRow struct {
	DeviceName string `json:"device_name"`
	CreatedAt  string `json:"created_at"`
	LastSync   string `json:"last_sync"`
}

// GetDevices lists a user's devices together with each device's most
// recent sync time. Devices with no activity rows are not returned
// (inner JOIN on activity).
func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRow, error) {
	rows, err := q.db.QueryContext(ctx, getDevices, userID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetDevicesRow
	for rows.Next() {
		var i GetDevicesRow
		if err := rows.Scan(&i.DeviceName, &i.CreatedAt, &i.LastSync); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
const getDocument = `-- name: GetDocument :one
SELECT id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
WHERE id = ?1 LIMIT 1
`

// GetDocument fetches a single document row by its ID.
func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document, error) {
	row := q.db.QueryRowContext(ctx, getDocument, documentID)
	var i Document
	err := row.Scan(
		&i.ID,
		&i.Md5,
		&i.Filepath,
		&i.Coverfile,
		&i.Title,
		&i.Author,
		&i.Series,
		&i.SeriesIndex,
		&i.Lang,
		&i.Description,
		&i.Words,
		&i.Gbid,
		&i.Olid,
		&i.Isbn10,
		&i.Isbn13,
		&i.Synced,
		&i.Deleted,
		&i.UpdatedAt,
		&i.CreatedAt,
	)
	return i, err
}
|
|
|
|
|
|
|
|
const getDocumentDaysRead = `-- name: GetDocumentDaysRead :one
WITH document_days AS (
    SELECT DATE(start_time, time_offset) AS dates
    FROM activity
    JOIN users ON users.id = activity.user_id
    WHERE document_id = ?1
    AND user_id = ?2
    GROUP BY dates
)
SELECT CAST(COUNT(*) AS INTEGER) AS days_read
FROM document_days
`

// GetDocumentDaysReadParams holds the bind parameters for GetDocumentDaysRead.
type GetDocumentDaysReadParams struct {
	DocumentID string `json:"document_id"`
	UserID     string `json:"user_id"`
}

// GetDocumentDaysRead counts the distinct (user-local) days on which the
// user has activity for the given document.
func (q *Queries) GetDocumentDaysRead(ctx context.Context, arg GetDocumentDaysReadParams) (int64, error) {
	row := q.db.QueryRowContext(ctx, getDocumentDaysRead, arg.DocumentID, arg.UserID)
	var days_read int64
	err := row.Scan(&days_read)
	return days_read, err
}
|
|
|
|
|
|
|
|
const getDocumentReadStats = `-- name: GetDocumentReadStats :one
SELECT
    COUNT(DISTINCT page) AS pages_read,
    SUM(duration) AS total_time
FROM activity
WHERE document_id = ?1
AND user_id = ?2
AND start_time >= ?3
`

// GetDocumentReadStatsParams holds the bind parameters; StartTime is the
// inclusive lower bound for activity considered.
type GetDocumentReadStatsParams struct {
	DocumentID string    `json:"document_id"`
	UserID     string    `json:"user_id"`
	StartTime  time.Time `json:"start_time"`
}

// GetDocumentReadStatsRow is the aggregate result. TotalTime is nullable
// (SUM over zero rows yields NULL), hence sql.NullFloat64.
type GetDocumentReadStatsRow struct {
	PagesRead int64           `json:"pages_read"`
	TotalTime sql.NullFloat64 `json:"total_time"`
}

// GetDocumentReadStats returns distinct pages read and total reading time
// for a user's document since the given start time.
func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadStatsParams) (GetDocumentReadStatsRow, error) {
	row := q.db.QueryRowContext(ctx, getDocumentReadStats, arg.DocumentID, arg.UserID, arg.StartTime)
	var i GetDocumentReadStatsRow
	err := row.Scan(&i.PagesRead, &i.TotalTime)
	return i, err
}
|
|
|
|
|
|
|
|
const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one
WITH capped_stats AS (
    SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations
    FROM activity
    WHERE document_id = ?2
    AND user_id = ?3
    AND start_time >= ?4
    GROUP BY page
)
SELECT
    CAST(COUNT(*) AS INTEGER) AS pages_read,
    CAST(SUM(durations) AS INTEGER) AS total_time
FROM capped_stats
`

// GetDocumentReadStatsCappedParams holds the bind parameters;
// PageDurationCap is the per-page ceiling (in the same units as duration)
// applied before summing.
type GetDocumentReadStatsCappedParams struct {
	PageDurationCap int64     `json:"page_duration_cap"`
	DocumentID      string    `json:"document_id"`
	UserID          string    `json:"user_id"`
	StartTime       time.Time `json:"start_time"`
}

// GetDocumentReadStatsCappedRow is the aggregate result of the capped query.
type GetDocumentReadStatsCappedRow struct {
	PagesRead int64 `json:"pages_read"`
	TotalTime int64 `json:"total_time"`
}

// GetDocumentReadStatsCapped returns pages read and total time for a
// user's document, with each page's summed duration capped at
// PageDurationCap (so an idle page left open doesn't inflate totals).
func (q *Queries) GetDocumentReadStatsCapped(ctx context.Context, arg GetDocumentReadStatsCappedParams) (GetDocumentReadStatsCappedRow, error) {
	row := q.db.QueryRowContext(ctx, getDocumentReadStatsCapped,
		arg.PageDurationCap,
		arg.DocumentID,
		arg.UserID,
		arg.StartTime,
	)
	var i GetDocumentReadStatsCappedRow
	err := row.Scan(&i.PagesRead, &i.TotalTime)
	return i, err
}
|
|
|
|
|
2023-09-23 02:12:36 +00:00
|
|
|
const getDocumentWithStats = `-- name: GetDocumentWithStats :one
WITH true_progress AS (
    SELECT
        start_time AS last_read,
        SUM(duration) AS total_time_seconds,
        document_id,
        page,
        pages,

        -- Determine Read Pages
        COUNT(DISTINCT page) AS read_pages,

        -- Derive Percentage of Book
        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
    FROM activity
    WHERE user_id = ?1
    AND document_id = ?2
    GROUP BY document_id
    HAVING MAX(start_time)
    LIMIT 1
)
SELECT
    documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at,

    CAST(IFNULL(page, 0) AS INTEGER) AS page,
    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
    CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
    CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
    CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages,

    -- Calculate Seconds / Page
    -- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
    -- 2. Divide by Read Pages (Distinct Pages in Activity)
    CAST(CASE
        WHEN total_time_seconds IS NULL THEN 0.0
        ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
    END AS INTEGER) AS seconds_per_page,

    -- Arbitrarily >97% is Complete
    CAST(CASE
        WHEN percentage > 97.0 THEN 100.0
        WHEN percentage IS NULL THEN 0.0
        ELSE percentage
    END AS REAL) AS percentage

FROM documents
LEFT JOIN true_progress ON true_progress.document_id = documents.id
LEFT JOIN users ON users.id = ?1
WHERE documents.id = ?2
ORDER BY true_progress.last_read DESC, documents.created_at DESC
LIMIT 1
`

// GetDocumentWithStatsParams holds the bind parameters for GetDocumentWithStats.
type GetDocumentWithStatsParams struct {
	UserID     string `json:"user_id"`
	DocumentID string `json:"document_id"`
}

// GetDocumentWithStatsRow is a document row augmented with per-user
// reading stats (page position, total time, last read, seconds/page and
// completion percentage). Olid/Synced/Deleted are hidden from JSON.
type GetDocumentWithStatsRow struct {
	ID               string    `json:"id"`
	Md5              *string   `json:"md5"`
	Filepath         *string   `json:"filepath"`
	Coverfile        *string   `json:"coverfile"`
	Title            *string   `json:"title"`
	Author           *string   `json:"author"`
	Series           *string   `json:"series"`
	SeriesIndex      *int64    `json:"series_index"`
	Lang             *string   `json:"lang"`
	Description      *string   `json:"description"`
	Words            *int64    `json:"words"`
	Gbid             *string   `json:"gbid"`
	Olid             *string   `json:"-"`
	Isbn10           *string   `json:"isbn10"`
	Isbn13           *string   `json:"isbn13"`
	Synced           bool      `json:"-"`
	Deleted          bool      `json:"-"`
	UpdatedAt        time.Time `json:"updated_at"`
	CreatedAt        time.Time `json:"created_at"`
	Page             int64     `json:"page"`
	Pages            int64     `json:"pages"`
	TotalTimeSeconds int64     `json:"total_time_seconds"`
	LastRead         string    `json:"last_read"`
	ReadPages        int64     `json:"read_pages"`
	SecondsPerPage   int64     `json:"seconds_per_page"`
	Percentage       float64   `json:"percentage"`
}

// GetDocumentWithStats fetches one document joined with the requesting
// user's reading statistics; stats default to 0 / epoch when the user has
// no activity for the document (LEFT JOIN + IFNULL).
func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithStatsParams) (GetDocumentWithStatsRow, error) {
	row := q.db.QueryRowContext(ctx, getDocumentWithStats, arg.UserID, arg.DocumentID)
	var i GetDocumentWithStatsRow
	err := row.Scan(
		&i.ID,
		&i.Md5,
		&i.Filepath,
		&i.Coverfile,
		&i.Title,
		&i.Author,
		&i.Series,
		&i.SeriesIndex,
		&i.Lang,
		&i.Description,
		&i.Words,
		&i.Gbid,
		&i.Olid,
		&i.Isbn10,
		&i.Isbn13,
		&i.Synced,
		&i.Deleted,
		&i.UpdatedAt,
		&i.CreatedAt,
		&i.Page,
		&i.Pages,
		&i.TotalTimeSeconds,
		&i.LastRead,
		&i.ReadPages,
		&i.SecondsPerPage,
		&i.Percentage,
	)
	return i, err
}
|
|
|
|
|
2023-09-18 23:57:18 +00:00
|
|
|
const getDocuments = `-- name: GetDocuments :many
SELECT id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
ORDER BY created_at DESC
LIMIT ?2
OFFSET ?1
`

// GetDocumentsParams holds pagination parameters for GetDocuments.
type GetDocumentsParams struct {
	Offset int64 `json:"offset"`
	Limit  int64 `json:"limit"`
}

// GetDocuments returns a page of documents ordered newest first.
// Note: unlike GetDocumentsWithStats, this query does not exclude
// soft-deleted documents.
func (q *Queries) GetDocuments(ctx context.Context, arg GetDocumentsParams) ([]Document, error) {
	rows, err := q.db.QueryContext(ctx, getDocuments, arg.Offset, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Document
	for rows.Next() {
		var i Document
		if err := rows.Scan(
			&i.ID,
			&i.Md5,
			&i.Filepath,
			&i.Coverfile,
			&i.Title,
			&i.Author,
			&i.Series,
			&i.SeriesIndex,
			&i.Lang,
			&i.Description,
			&i.Words,
			&i.Gbid,
			&i.Olid,
			&i.Isbn10,
			&i.Isbn13,
			&i.Synced,
			&i.Deleted,
			&i.UpdatedAt,
			&i.CreatedAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
const getDocumentsWithStats = `-- name: GetDocumentsWithStats :many
WITH true_progress AS (
    SELECT
        start_time AS last_read,
        SUM(duration) AS total_time_seconds,
        document_id,
        page,
        pages,
        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
    FROM activity
    WHERE user_id = ?1
    GROUP BY document_id
    HAVING MAX(start_time)
)
SELECT
    documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at,

    CAST(IFNULL(page, 0) AS INTEGER) AS page,
    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
    CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
    CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,

    CAST(CASE
        WHEN percentage > 97.0 THEN 100.0
        WHEN percentage IS NULL THEN 0.0
        ELSE percentage
    END AS REAL) AS percentage

FROM documents
LEFT JOIN true_progress ON true_progress.document_id = documents.id
LEFT JOIN users ON users.id = ?1
WHERE documents.deleted == false
ORDER BY true_progress.last_read DESC, documents.created_at DESC
LIMIT ?3
OFFSET ?2
`

// GetDocumentsWithStatsParams holds the user scope and pagination
// parameters for GetDocumentsWithStats.
type GetDocumentsWithStatsParams struct {
	UserID string `json:"user_id"`
	Offset int64  `json:"offset"`
	Limit  int64  `json:"limit"`
}

// GetDocumentsWithStatsRow is a document row augmented with the user's
// latest page position, total reading time, last-read timestamp and
// completion percentage. Olid/Synced/Deleted are hidden from JSON.
type GetDocumentsWithStatsRow struct {
	ID               string    `json:"id"`
	Md5              *string   `json:"md5"`
	Filepath         *string   `json:"filepath"`
	Coverfile        *string   `json:"coverfile"`
	Title            *string   `json:"title"`
	Author           *string   `json:"author"`
	Series           *string   `json:"series"`
	SeriesIndex      *int64    `json:"series_index"`
	Lang             *string   `json:"lang"`
	Description      *string   `json:"description"`
	Words            *int64    `json:"words"`
	Gbid             *string   `json:"gbid"`
	Olid             *string   `json:"-"`
	Isbn10           *string   `json:"isbn10"`
	Isbn13           *string   `json:"isbn13"`
	Synced           bool      `json:"-"`
	Deleted          bool      `json:"-"`
	UpdatedAt        time.Time `json:"updated_at"`
	CreatedAt        time.Time `json:"created_at"`
	Page             int64     `json:"page"`
	Pages            int64     `json:"pages"`
	TotalTimeSeconds int64     `json:"total_time_seconds"`
	LastRead         string    `json:"last_read"`
	Percentage       float64   `json:"percentage"`
}

// GetDocumentsWithStats returns a page of non-deleted documents joined
// with the user's reading stats, most recently read first (documents with
// no activity fall back to creation-date ordering).
func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) {
	rows, err := q.db.QueryContext(ctx, getDocumentsWithStats, arg.UserID, arg.Offset, arg.Limit)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []GetDocumentsWithStatsRow
	for rows.Next() {
		var i GetDocumentsWithStatsRow
		if err := rows.Scan(
			&i.ID,
			&i.Md5,
			&i.Filepath,
			&i.Coverfile,
			&i.Title,
			&i.Author,
			&i.Series,
			&i.SeriesIndex,
			&i.Lang,
			&i.Description,
			&i.Words,
			&i.Gbid,
			&i.Olid,
			&i.Isbn10,
			&i.Isbn13,
			&i.Synced,
			&i.Deleted,
			&i.UpdatedAt,
			&i.CreatedAt,
			&i.Page,
			&i.Pages,
			&i.TotalTimeSeconds,
			&i.LastRead,
			&i.Percentage,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
const getLastActivity = `-- name: GetLastActivity :one
SELECT start_time
FROM activity
WHERE device_id = ?1
AND user_id = ?2
ORDER BY start_time DESC LIMIT 1
`

// GetLastActivityParams holds the bind parameters for GetLastActivity.
type GetLastActivityParams struct {
	DeviceID string `json:"device_id"`
	UserID   string `json:"user_id"`
}

// GetLastActivity returns the start_time of the most recent activity row
// for the given device and user (sql.ErrNoRows when there is none).
func (q *Queries) GetLastActivity(ctx context.Context, arg GetLastActivityParams) (time.Time, error) {
	row := q.db.QueryRowContext(ctx, getLastActivity, arg.DeviceID, arg.UserID)
	var start_time time.Time
	err := row.Scan(&start_time)
	return start_time, err
}
|
|
|
|
|
|
|
|
// getMissingDocuments selects every non-deleted document that has a file
// (filepath IS NOT NULL) and whose id is NOT in the caller-supplied list.
// The /*SLICE:document_ids*/ marker is expanded at runtime by
// GetMissingDocuments before execution.
const getMissingDocuments = `-- name: GetMissingDocuments :many
SELECT documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at FROM documents
WHERE
    documents.filepath IS NOT NULL
    AND documents.deleted = false
    AND documents.id NOT IN (/*SLICE:document_ids*/?)
`
|
|
|
|
|
|
|
|
// GetMissingDocuments returns all non-deleted documents with a file on
// record whose ids are not present in documentIds. An empty documentIds
// slice matches no exclusions (the slice placeholder becomes NULL).
func (q *Queries) GetMissingDocuments(ctx context.Context, documentIds []string) ([]Document, error) {
	query := getMissingDocuments
	var queryParams []interface{}
	if len(documentIds) > 0 {
		for _, v := range documentIds {
			queryParams = append(queryParams, v)
		}
		// Expand the sqlc slice marker into one "?" per element.
		query = strings.Replace(query, "/*SLICE:document_ids*/?", strings.Repeat(",?", len(documentIds))[1:], 1)
	} else {
		// NOT IN (NULL) excludes nothing, so every candidate row matches.
		query = strings.Replace(query, "/*SLICE:document_ids*/?", "NULL", 1)
	}
	rows, err := q.db.QueryContext(ctx, query, queryParams...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Document
	for rows.Next() {
		var i Document
		// Scan order must match the SELECT column list; regenerate with
		// sqlc rather than editing by hand.
		if err := rows.Scan(
			&i.ID,
			&i.Md5,
			&i.Filepath,
			&i.Coverfile,
			&i.Title,
			&i.Author,
			&i.Series,
			&i.SeriesIndex,
			&i.Lang,
			&i.Description,
			&i.Words,
			&i.Gbid,
			&i.Olid,
			&i.Isbn10,
			&i.Isbn13,
			&i.Synced,
			&i.Deleted,
			&i.UpdatedAt,
			&i.CreatedAt,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}
|
|
|
|
|
|
|
|
// getProgress fetches the newest document_progress row for a user/document
// pair, joined with the reporting device's name.
const getProgress = `-- name: GetProgress :one
SELECT
    document_progress.user_id, document_progress.document_id, document_progress.device_id, document_progress.percentage, document_progress.progress, document_progress.created_at,
    devices.device_name
FROM document_progress
JOIN devices ON document_progress.device_id = devices.id
WHERE
    document_progress.user_id = ?1
    AND document_progress.document_id = ?2
ORDER BY
    document_progress.created_at
DESC
LIMIT 1
`
|
|
|
|
|
|
|
|
// GetProgressParams holds the bind arguments for GetProgress.
type GetProgressParams struct {
	UserID     string `json:"user_id"`
	DocumentID string `json:"document_id"`
}
|
|
|
|
|
|
|
|
// GetProgressRow is one result of GetProgress: a document_progress record
// plus the name of the device that reported it.
type GetProgressRow struct {
	UserID     string    `json:"user_id"`
	DocumentID string    `json:"document_id"`
	DeviceID   string    `json:"device_id"`
	Percentage float64   `json:"percentage"`
	Progress   string    `json:"progress"`
	CreatedAt  time.Time `json:"created_at"`
	DeviceName string    `json:"device_name"`
}
|
|
|
|
|
|
|
|
func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) (GetProgressRow, error) {
|
|
|
|
row := q.db.QueryRowContext(ctx, getProgress, arg.UserID, arg.DocumentID)
|
|
|
|
var i GetProgressRow
|
|
|
|
err := row.Scan(
|
|
|
|
&i.UserID,
|
|
|
|
&i.DocumentID,
|
|
|
|
&i.DeviceID,
|
|
|
|
&i.Percentage,
|
|
|
|
&i.Progress,
|
|
|
|
&i.CreatedAt,
|
|
|
|
&i.DeviceName,
|
|
|
|
)
|
|
|
|
return i, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// getUser fetches a single user row by primary key.
const getUser = `-- name: GetUser :one
SELECT id, pass, admin, time_offset, created_at FROM users
WHERE id = ?1 LIMIT 1
`
|
|
|
|
|
|
|
|
func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
|
|
|
|
row := q.db.QueryRowContext(ctx, getUser, userID)
|
|
|
|
var i User
|
|
|
|
err := row.Scan(
|
|
|
|
&i.ID,
|
|
|
|
&i.Pass,
|
|
|
|
&i.Admin,
|
2023-09-21 00:35:01 +00:00
|
|
|
&i.TimeOffset,
|
2023-09-18 23:57:18 +00:00
|
|
|
&i.CreatedAt,
|
|
|
|
)
|
|
|
|
return i, err
|
|
|
|
}
|
|
|
|
|
2023-10-03 20:47:38 +00:00
|
|
|
// getUserStreaks fetches all streak rows for a user. "window" is quoted
// because it is a reserved word in SQLite's window-function syntax.
const getUserStreaks = `-- name: GetUserStreaks :many
SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
WHERE user_id = ?1
`
|
|
|
|
|
2023-10-03 20:47:38 +00:00
|
|
|
func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) {
|
|
|
|
rows, err := q.db.QueryContext(ctx, getUserStreaks, userID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
var items []UserStreak
|
|
|
|
for rows.Next() {
|
|
|
|
var i UserStreak
|
|
|
|
if err := rows.Scan(
|
|
|
|
&i.UserID,
|
|
|
|
&i.Window,
|
|
|
|
&i.MaxStreak,
|
|
|
|
&i.MaxStreakStartDate,
|
|
|
|
&i.MaxStreakEndDate,
|
|
|
|
&i.CurrentStreak,
|
|
|
|
&i.CurrentStreakStartDate,
|
|
|
|
&i.CurrentStreakEndDate,
|
|
|
|
); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
items = append(items, i)
|
|
|
|
}
|
|
|
|
if err := rows.Close(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if err := rows.Err(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return items, nil
|
2023-09-18 23:57:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// getUsers lists users visible to ?1: the requesting user's own row always
// matches, and every row matches when ?1 identifies an admin (the IN
// subquery). Newest first; ?3 is LIMIT, ?2 is OFFSET.
const getUsers = `-- name: GetUsers :many
SELECT id, pass, admin, time_offset, created_at FROM users
WHERE
    users.id = ?1
    OR ?1 IN (
        SELECT id
        FROM users
        WHERE id = ?1
        AND admin = 1
    )
ORDER BY created_at DESC
LIMIT ?3
OFFSET ?2
`
|
|
|
|
|
|
|
|
// GetUsersParams holds the bind arguments for GetUsers. User is the id of
// the requesting user; Offset/Limit drive pagination.
type GetUsersParams struct {
	User   string `json:"user"`
	Offset int64  `json:"offset"`
	Limit  int64  `json:"limit"`
}
|
|
|
|
|
|
|
|
func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, error) {
|
|
|
|
rows, err := q.db.QueryContext(ctx, getUsers, arg.User, arg.Offset, arg.Limit)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
var items []User
|
|
|
|
for rows.Next() {
|
|
|
|
var i User
|
|
|
|
if err := rows.Scan(
|
|
|
|
&i.ID,
|
|
|
|
&i.Pass,
|
|
|
|
&i.Admin,
|
2023-09-21 00:35:01 +00:00
|
|
|
&i.TimeOffset,
|
2023-09-18 23:57:18 +00:00
|
|
|
&i.CreatedAt,
|
|
|
|
); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
items = append(items, i)
|
|
|
|
}
|
|
|
|
if err := rows.Close(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if err := rows.Err(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return items, nil
|
|
|
|
}
|
|
|
|
|
2023-10-03 20:47:38 +00:00
|
|
|
// getWPMLeaderboard aggregates document_user_statistics per user into total
// words read, total seconds, and a words-per-minute rate, fastest first.
// Rows with zero words_read are excluded by the WHERE clause; NOTE(review):
// a user whose summed total_time_seconds is 0 would divide by zero
// (NULL in SQLite) — presumably impossible when words_read > 0; confirm.
const getWPMLeaderboard = `-- name: GetWPMLeaderboard :many
SELECT
    user_id,
    CAST(SUM(words_read) AS INTEGER) AS total_words_read,
    CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
    ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
    AS wpm
FROM document_user_statistics
WHERE words_read > 0
GROUP BY user_id
ORDER BY wpm DESC
`
|
|
|
|
|
|
|
|
// GetWPMLeaderboardRow is one leaderboard entry: a user's aggregate words
// read, seconds spent reading, and derived words-per-minute rate.
type GetWPMLeaderboardRow struct {
	UserID         string  `json:"user_id"`
	TotalWordsRead int64   `json:"total_words_read"`
	TotalSeconds   int64   `json:"total_seconds"`
	Wpm            float64 `json:"wpm"`
}
|
|
|
|
|
|
|
|
func (q *Queries) GetWPMLeaderboard(ctx context.Context) ([]GetWPMLeaderboardRow, error) {
|
|
|
|
rows, err := q.db.QueryContext(ctx, getWPMLeaderboard)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
var items []GetWPMLeaderboardRow
|
|
|
|
for rows.Next() {
|
|
|
|
var i GetWPMLeaderboardRow
|
|
|
|
if err := rows.Scan(
|
|
|
|
&i.UserID,
|
|
|
|
&i.TotalWordsRead,
|
|
|
|
&i.TotalSeconds,
|
|
|
|
&i.Wpm,
|
|
|
|
); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
items = append(items, i)
|
|
|
|
}
|
|
|
|
if err := rows.Close(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if err := rows.Err(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return items, nil
|
|
|
|
}
|
|
|
|
|
2023-09-18 23:57:18 +00:00
|
|
|
// getWantedDocuments takes a JSON array of document ids (?1, unpacked via
// json_each) and reports for each whether the server still wants the file
// (no filepath) and/or metadata (not synced). Unknown ids (no matching
// documents row) want both. NOTE(review): the final
// CAST(?1...) != CAST(?1...) clause is always false — presumably emitted
// to force the ?1 binding; confirm against query.sql before changing.
const getWantedDocuments = `-- name: GetWantedDocuments :many
SELECT
    CAST(value AS TEXT) AS id,
    CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
    CAST((IFNULL(documents.synced, false) != true) AS BOOLEAN) AS want_metadata
FROM json_each(?1)
LEFT JOIN documents
ON value = documents.id
WHERE (
    documents.id IS NOT NULL
    AND documents.deleted = false
    AND (
        documents.synced = false
        OR documents.filepath IS NULL
    )
)
OR (documents.id IS NULL)
OR CAST(?1 AS TEXT) != CAST(?1 AS TEXT)
`
|
|
|
|
|
2023-09-19 23:29:55 +00:00
|
|
|
// GetWantedDocumentsRow reports, for one document id, whether the server
// still needs its file and/or its metadata.
type GetWantedDocumentsRow struct {
	ID           string `json:"id"`
	WantFile     bool   `json:"want_file"`
	WantMetadata bool   `json:"want_metadata"`
}
|
|
|
|
|
|
|
|
func (q *Queries) GetWantedDocuments(ctx context.Context, documentIds string) ([]GetWantedDocumentsRow, error) {
|
2023-09-18 23:57:18 +00:00
|
|
|
rows, err := q.db.QueryContext(ctx, getWantedDocuments, documentIds)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
2023-09-19 23:29:55 +00:00
|
|
|
var items []GetWantedDocumentsRow
|
2023-09-18 23:57:18 +00:00
|
|
|
for rows.Next() {
|
2023-09-19 23:29:55 +00:00
|
|
|
var i GetWantedDocumentsRow
|
|
|
|
if err := rows.Scan(&i.ID, &i.WantFile, &i.WantMetadata); err != nil {
|
2023-09-18 23:57:18 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
2023-09-19 23:29:55 +00:00
|
|
|
items = append(items, i)
|
2023-09-18 23:57:18 +00:00
|
|
|
}
|
|
|
|
if err := rows.Close(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
if err := rows.Err(); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return items, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// updateDocumentDeleted sets a document's deleted flag and returns the
// full updated row.
const updateDocumentDeleted = `-- name: UpdateDocumentDeleted :one
UPDATE documents
SET
    deleted = ?1
WHERE id = ?2
RETURNING id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
`
|
|
|
|
|
|
|
|
// UpdateDocumentDeletedParams holds the bind arguments for
// UpdateDocumentDeleted. Deleted is excluded from JSON (`json:"-"`).
type UpdateDocumentDeletedParams struct {
	Deleted bool   `json:"-"`
	ID      string `json:"id"`
}
|
|
|
|
|
|
|
|
// UpdateDocumentDeleted sets the deleted flag on a single document and
// returns the updated row.
func (q *Queries) UpdateDocumentDeleted(ctx context.Context, arg UpdateDocumentDeletedParams) (Document, error) {
	row := q.db.QueryRowContext(ctx, updateDocumentDeleted, arg.Deleted, arg.ID)
	var i Document
	// Scan order must match the RETURNING column list of the query above.
	err := row.Scan(
		&i.ID,
		&i.Md5,
		&i.Filepath,
		&i.Coverfile,
		&i.Title,
		&i.Author,
		&i.Series,
		&i.SeriesIndex,
		&i.Lang,
		&i.Description,
		&i.Words,
		&i.Gbid,
		&i.Olid,
		&i.Isbn10,
		&i.Isbn13,
		&i.Synced,
		&i.Deleted,
		&i.UpdatedAt,
		&i.CreatedAt,
	)
	return i, err
}
|
|
|
|
|
|
|
|
// updateDocumentSync sets a document's synced flag and returns the full
// updated row.
const updateDocumentSync = `-- name: UpdateDocumentSync :one
UPDATE documents
SET
    synced = ?1
WHERE id = ?2
RETURNING id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
`
|
|
|
|
|
|
|
|
// UpdateDocumentSyncParams holds the bind arguments for UpdateDocumentSync.
// Synced is excluded from JSON (`json:"-"`).
type UpdateDocumentSyncParams struct {
	Synced bool   `json:"-"`
	ID     string `json:"id"`
}
|
|
|
|
|
|
|
|
// UpdateDocumentSync sets the synced flag on a single document and returns
// the updated row.
func (q *Queries) UpdateDocumentSync(ctx context.Context, arg UpdateDocumentSyncParams) (Document, error) {
	row := q.db.QueryRowContext(ctx, updateDocumentSync, arg.Synced, arg.ID)
	var i Document
	// Scan order must match the RETURNING column list of the query above.
	err := row.Scan(
		&i.ID,
		&i.Md5,
		&i.Filepath,
		&i.Coverfile,
		&i.Title,
		&i.Author,
		&i.Series,
		&i.SeriesIndex,
		&i.Lang,
		&i.Description,
		&i.Words,
		&i.Gbid,
		&i.Olid,
		&i.Isbn10,
		&i.Isbn13,
		&i.Synced,
		&i.Deleted,
		&i.UpdatedAt,
		&i.CreatedAt,
	)
	return i, err
}
|
|
|
|
|
|
|
|
// updateProgress upserts a document_progress row (INSERT OR REPLACE, so an
// existing row for the same key is overwritten) and returns the new row.
const updateProgress = `-- name: UpdateProgress :one
INSERT OR REPLACE INTO document_progress (
    user_id,
    document_id,
    device_id,
    percentage,
    progress
)
VALUES (?, ?, ?, ?, ?)
RETURNING user_id, document_id, device_id, percentage, progress, created_at
`
|
|
|
|
|
|
|
|
// UpdateProgressParams holds the bind arguments for UpdateProgress.
type UpdateProgressParams struct {
	UserID     string  `json:"user_id"`
	DocumentID string  `json:"document_id"`
	DeviceID   string  `json:"device_id"`
	Percentage float64 `json:"percentage"`
	Progress   string  `json:"progress"`
}
|
|
|
|
|
|
|
|
func (q *Queries) UpdateProgress(ctx context.Context, arg UpdateProgressParams) (DocumentProgress, error) {
|
|
|
|
row := q.db.QueryRowContext(ctx, updateProgress,
|
|
|
|
arg.UserID,
|
|
|
|
arg.DocumentID,
|
|
|
|
arg.DeviceID,
|
|
|
|
arg.Percentage,
|
|
|
|
arg.Progress,
|
|
|
|
)
|
|
|
|
var i DocumentProgress
|
|
|
|
err := row.Scan(
|
|
|
|
&i.UserID,
|
|
|
|
&i.DocumentID,
|
|
|
|
&i.DeviceID,
|
|
|
|
&i.Percentage,
|
|
|
|
&i.Progress,
|
|
|
|
&i.CreatedAt,
|
|
|
|
)
|
|
|
|
return i, err
|
|
|
|
}
|
|
|
|
|
2023-09-27 22:58:47 +00:00
|
|
|
// updateUser patches a user's password and/or time offset; NULL bind values
// leave the existing column untouched via COALESCE.
const updateUser = `-- name: UpdateUser :one
UPDATE users
SET
    pass = COALESCE(?1, pass),
    time_offset = COALESCE(?2, time_offset)
WHERE id = ?3
RETURNING id, pass, admin, time_offset, created_at
`
|
|
|
|
|
|
|
|
// UpdateUserParams holds the bind arguments for UpdateUser. Nil pointer
// fields leave the corresponding column unchanged; Password is excluded
// from JSON (`json:"-"`).
type UpdateUserParams struct {
	Password   *string `json:"-"`
	TimeOffset *string `json:"time_offset"`
	UserID     string  `json:"user_id"`
}
|
|
|
|
|
|
|
|
func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, error) {
|
|
|
|
row := q.db.QueryRowContext(ctx, updateUser, arg.Password, arg.TimeOffset, arg.UserID)
|
|
|
|
var i User
|
|
|
|
err := row.Scan(
|
|
|
|
&i.ID,
|
|
|
|
&i.Pass,
|
|
|
|
&i.Admin,
|
|
|
|
&i.TimeOffset,
|
|
|
|
&i.CreatedAt,
|
|
|
|
)
|
|
|
|
return i, err
|
|
|
|
}
|
|
|
|
|
2023-09-18 23:57:18 +00:00
|
|
|
// upsertDevice inserts a device row, or on conflict updates its name
// (keeping the existing name when the new one is NULL), returning the row.
const upsertDevice = `-- name: UpsertDevice :one
INSERT INTO devices (id, user_id, device_name)
VALUES (?, ?, ?)
ON CONFLICT DO UPDATE
SET
    device_name = COALESCE(excluded.device_name, device_name)
RETURNING id, user_id, device_name, created_at, sync
`
|
|
|
|
|
|
|
|
// UpsertDeviceParams holds the bind arguments for UpsertDevice.
type UpsertDeviceParams struct {
	ID         string `json:"id"`
	UserID     string `json:"user_id"`
	DeviceName string `json:"device_name"`
}
|
|
|
|
|
|
|
|
func (q *Queries) UpsertDevice(ctx context.Context, arg UpsertDeviceParams) (Device, error) {
|
|
|
|
row := q.db.QueryRowContext(ctx, upsertDevice, arg.ID, arg.UserID, arg.DeviceName)
|
|
|
|
var i Device
|
|
|
|
err := row.Scan(
|
|
|
|
&i.ID,
|
|
|
|
&i.UserID,
|
|
|
|
&i.DeviceName,
|
|
|
|
&i.CreatedAt,
|
|
|
|
&i.Sync,
|
|
|
|
)
|
|
|
|
return i, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// upsertDocument inserts a document, or on conflict merges the incoming
// values column-by-column: NULL bind values leave the stored column
// untouched via COALESCE(excluded.X, X). Returns the resulting row.
// Note: the VALUES list binds olid BEFORE gbid — keep UpsertDocument's
// argument order in sync.
const upsertDocument = `-- name: UpsertDocument :one
INSERT INTO documents (
    id,
    md5,
    filepath,
    coverfile,
    title,
    author,
    series,
    series_index,
    lang,
    description,
    words,
    olid,
    gbid,
    isbn10,
    isbn13
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT DO UPDATE
SET
    md5 = COALESCE(excluded.md5, md5),
    filepath = COALESCE(excluded.filepath, filepath),
    coverfile = COALESCE(excluded.coverfile, coverfile),
    title = COALESCE(excluded.title, title),
    author = COALESCE(excluded.author, author),
    series = COALESCE(excluded.series, series),
    series_index = COALESCE(excluded.series_index, series_index),
    lang = COALESCE(excluded.lang, lang),
    description = COALESCE(excluded.description, description),
    words = COALESCE(excluded.words, words),
    olid = COALESCE(excluded.olid, olid),
    gbid = COALESCE(excluded.gbid, gbid),
    isbn10 = COALESCE(excluded.isbn10, isbn10),
    isbn13 = COALESCE(excluded.isbn13, isbn13)
RETURNING id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
`
|
|
|
|
|
|
|
|
// UpsertDocumentParams holds the bind arguments for UpsertDocument. Nil
// pointer fields leave the corresponding stored column unchanged (COALESCE
// in the query); Olid is excluded from JSON (`json:"-"`).
type UpsertDocumentParams struct {
	ID          string  `json:"id"`
	Md5         *string `json:"md5"`
	Filepath    *string `json:"filepath"`
	Coverfile   *string `json:"coverfile"`
	Title       *string `json:"title"`
	Author      *string `json:"author"`
	Series      *string `json:"series"`
	SeriesIndex *int64  `json:"series_index"`
	Lang        *string `json:"lang"`
	Description *string `json:"description"`
	Words       *int64  `json:"words"`
	Olid        *string `json:"-"`
	Gbid        *string `json:"gbid"`
	Isbn10      *string `json:"isbn10"`
	Isbn13      *string `json:"isbn13"`
}
|
|
|
|
|
|
|
|
// UpsertDocument inserts or merges a document row and returns the stored
// result. Nil fields in arg leave existing column values untouched.
func (q *Queries) UpsertDocument(ctx context.Context, arg UpsertDocumentParams) (Document, error) {
	// Bind order follows the query's VALUES list: note Olid precedes Gbid.
	row := q.db.QueryRowContext(ctx, upsertDocument,
		arg.ID,
		arg.Md5,
		arg.Filepath,
		arg.Coverfile,
		arg.Title,
		arg.Author,
		arg.Series,
		arg.SeriesIndex,
		arg.Lang,
		arg.Description,
		arg.Words,
		arg.Olid,
		arg.Gbid,
		arg.Isbn10,
		arg.Isbn13,
	)
	var i Document
	// Scan order must match the RETURNING column list (Gbid before Olid).
	err := row.Scan(
		&i.ID,
		&i.Md5,
		&i.Filepath,
		&i.Coverfile,
		&i.Title,
		&i.Author,
		&i.Series,
		&i.SeriesIndex,
		&i.Lang,
		&i.Description,
		&i.Words,
		&i.Gbid,
		&i.Olid,
		&i.Isbn10,
		&i.Isbn13,
		&i.Synced,
		&i.Deleted,
		&i.UpdatedAt,
		&i.CreatedAt,
	)
	return i, err
}
|