Compare commits
No commits in common. "75ed394f8d9f5eac60c5fff2da8e4b2c7352c8c9" and "5865fe3c13432404ba5897e968b9c0717578ecf2" have entirely different histories.
75ed394f8d ... 5865fe3c13

.drone.yml (15 lines changed)
@@ -4,10 +4,21 @@ name: default
 
 steps:
   # Unit Tests
-  - name: tests
+  - name: unit test
     image: golang
     commands:
-      - make tests
+      - make tests_unit
 
+  # Integration Tests (Every Month)
+  - name: integration test
+    image: golang
+    commands:
+      - make tests_integration
+    when:
+      event:
+        - cron
+      cron:
+        - integration-test
+
   # Fetch tags
   - name: fetch tags
.gitignore (vendored, 1 line changed)
@@ -3,4 +3,3 @@ TODO.md
 data/
 build/
 .direnv/
-cover.html
Makefile (9 lines changed)
@@ -42,7 +42,8 @@ dev: build_tailwind
 clean:
     rm -rf ./build
 
-tests:
-    SET_TEST=set_val go test -coverpkg=./... ./... -coverprofile=./cover.out
-    go tool cover -html=./cover.out -o ./cover.html
-    rm ./cover.out
+tests_integration:
+    go test -v -tags=integration -coverpkg=./... ./metadata
+
+tests_unit:
+    SET_TEST=set_val go test -v -coverpkg=./... ./...
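Note: the new tests_integration target runs go test with -tags=integration against the metadata package, which implies those tests are guarded by a build constraint. A minimal sketch of how such a guarded test file usually looks follows; the test name and body are illustrative, not taken from the repository.

//go:build integration

package metadata

import "testing"

// Runs only when built with the "integration" tag, e.g. via `make tests_integration`.
func TestMetadataIntegration(t *testing.T) {
    t.Log("integration-only test placeholder")
}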
@@ -19,7 +19,6 @@ import (
     "github.com/gin-gonic/gin"
     "github.com/itchyny/gojq"
     log "github.com/sirupsen/logrus"
-    "reichard.io/antholume/metadata"
 )
 
 type adminAction string
@@ -64,7 +63,7 @@ func (api *API) appPerformAdminAction(c *gin.Context) {
     var rAdminAction requestAdminAction
     if err := c.ShouldBind(&rAdminAction); err != nil {
         log.Error("Invalid Form Bind: ", err)
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -76,7 +75,6 @@ func (api *API) appPerformAdminAction(c *gin.Context) {
     // 2. Select all / deselect?
     case adminCacheTables:
         go api.db.CacheTempTables()
-        // TODO - Message
     case adminRestore:
         api.processRestoreFile(rAdminAction, c)
         return
@@ -85,7 +83,7 @@ func (api *API) appPerformAdminAction(c *gin.Context) {
     _, err := api.db.DB.ExecContext(api.db.Ctx, "VACUUM;")
     if err != nil {
         log.Error("Unable to vacuum DB: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database.")
         return
     }
 
@@ -128,7 +126,7 @@ func (api *API) appGetAdminLogs(c *gin.Context) {
     var rAdminLogs requestAdminLogs
     if err := c.ShouldBindQuery(&rAdminLogs); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid URI parameters")
+        appErrorPage(c, http.StatusNotFound, "Invalid URI parameters.")
         return
     }
     rAdminLogs.Filter = strings.TrimSpace(rAdminLogs.Filter)
@@ -138,14 +136,14 @@ func (api *API) appGetAdminLogs(c *gin.Context) {
     parsed, err := gojq.Parse(rAdminLogs.Filter)
     if err != nil {
         log.Error("Unable to parse JQ filter")
-        appErrorPage(c, http.StatusNotFound, "Unable to parse JQ filter")
+        appErrorPage(c, http.StatusNotFound, "Unable to parse JQ filter.")
         return
     }
 
     jqFilter, err = gojq.Compile(parsed)
     if err != nil {
         log.Error("Unable to compile JQ filter")
-        appErrorPage(c, http.StatusNotFound, "Unable to compile JQ filter")
+        appErrorPage(c, http.StatusNotFound, "Unable to compile JQ filter.")
         return
     }
 }
@@ -154,7 +152,7 @@ func (api *API) appGetAdminLogs(c *gin.Context) {
     logPath := filepath.Join(api.cfg.ConfigPath, "logs/antholume.log")
     logFile, err := os.Open(logPath)
     if err != nil {
-        appErrorPage(c, http.StatusBadRequest, "Missing AnthoLume log file")
+        appErrorPage(c, http.StatusBadRequest, "Missing AnthoLume log file.")
         return
     }
     defer logFile.Close()
@@ -231,7 +229,7 @@ func (api *API) appGetAdminImport(c *gin.Context) {
     var rImportFolder requestAdminImport
     if err := c.ShouldBindQuery(&rImportFolder); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid directory")
+        appErrorPage(c, http.StatusNotFound, "Invalid directory.")
         return
     }
 
@@ -246,7 +244,7 @@ func (api *API) appGetAdminImport(c *gin.Context) {
     dPath, err := filepath.Abs(api.cfg.DataPath)
     if err != nil {
         log.Error("Absolute filepath error: ", rImportFolder.Directory)
-        appErrorPage(c, http.StatusNotFound, "Unable to get data directory absolute path")
+        appErrorPage(c, http.StatusNotFound, "Unable to get data directory absolute path.")
         return
     }
 
@@ -256,7 +254,7 @@ func (api *API) appGetAdminImport(c *gin.Context) {
     entries, err := os.ReadDir(rImportFolder.Directory)
     if err != nil {
         log.Error("Invalid directory: ", rImportFolder.Directory)
-        appErrorPage(c, http.StatusNotFound, "Invalid directory")
+        appErrorPage(c, http.StatusNotFound, "Invalid directory.")
         return
     }
 
@@ -281,46 +279,13 @@ func (api *API) appPerformAdminImport(c *gin.Context) {
     var rAdminImport requestAdminImport
     if err := c.ShouldBind(&rAdminImport); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid directory")
+        appErrorPage(c, http.StatusNotFound, "Invalid directory.")
         return
     }
 
-    // TODO - Store results for approval?
+    // TODO
 
-    // Walk import directory & copy or import files
-    importDirectory := filepath.Clean(rAdminImport.Directory)
-    _ = filepath.WalkDir(importDirectory, func(currentPath string, f fs.DirEntry, err error) error {
-        if err != nil {
-            return err
-        }
-        if f.IsDir() {
-            return nil
-        }
-
-        // Get metadata
-        fileMeta, err := metadata.GetMetadata(currentPath)
-        if err != nil {
-            fmt.Printf("metadata error: %v\n", err)
-            return nil
-        }
-
-        // Only needed if copying
-        newName := deriveBaseFileName(fileMeta)
-
-        // Open File on Disk
-        // file, err := os.Open(currentPath)
-        // if err != nil {
-        //     return err
-        // }
-        // defer file.Close()
-
-        // TODO - BasePath in DB
-        // TODO - Copy / Import
-
-        fmt.Printf("New File Metadata: %s\n", newName)
-
-        return nil
-    })
+    fmt.Println(rAdminImport)
 
     templateVars["CurrentPath"] = filepath.Clean(rAdminImport.Directory)
 
@@ -332,14 +297,14 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     uploadedFile, err := rAdminAction.RestoreFile.Open()
     if err != nil {
         log.Error("File Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to open file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to open file.")
         return
     }
 
     fileMime, err := mimetype.DetectReader(uploadedFile)
     if err != nil {
         log.Error("MIME Error")
-        appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype.")
         return
     }
     fileExtension := fileMime.Extension()
@@ -347,7 +312,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     // Validate Extension
     if !slices.Contains([]string{".zip"}, fileExtension) {
         log.Error("Invalid FileType: ", fileExtension)
-        appErrorPage(c, http.StatusBadRequest, "Invalid filetype")
+        appErrorPage(c, http.StatusBadRequest, "Invalid filetype.")
         return
     }
 
@@ -355,7 +320,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     tempFile, err := os.CreateTemp("", "restore")
     if err != nil {
         log.Warn("Temp File Create Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file.")
         return
     }
     defer os.Remove(tempFile.Name())
@@ -365,7 +330,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     err = c.SaveUploadedFile(rAdminAction.RestoreFile, tempFile.Name())
     if err != nil {
         log.Error("File Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to save file.")
         return
     }
 
@@ -373,7 +338,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     fileInfo, err := tempFile.Stat()
     if err != nil {
         log.Error("File Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to read file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to read file.")
         return
     }
 
@@ -381,7 +346,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     zipReader, err := zip.NewReader(tempFile, fileInfo.Size())
     if err != nil {
         log.Error("ZIP Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to read zip")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to read zip.")
         return
     }
 
@@ -415,7 +380,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     backupFile, err := os.Create(backupFilePath)
     if err != nil {
         log.Error("Unable to create backup file: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to create backup file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to create backup file.")
         return
     }
     defer backupFile.Close()
@@ -424,7 +389,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     _, err = api.db.DB.ExecContext(api.db.Ctx, "VACUUM;")
     if err != nil {
         log.Error("Unable to vacuum DB: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database.")
         return
     }
 
@@ -433,7 +398,7 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     err = api.createBackup(w, []string{"covers", "documents"})
     if err != nil {
         log.Error("Unable to save backup file: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to save backup file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to save backup file.")
         return
     }
 
@@ -441,26 +406,26 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     err = api.removeData()
     if err != nil {
         log.Error("Unable to delete data: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to delete data")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to delete data.")
         return
     }
 
     // Restore Data
     err = api.restoreData(zipReader)
     if err != nil {
-        appErrorPage(c, http.StatusInternalServerError, "Unable to restore data")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to restore data.")
         log.Panic("Unable to restore data: ", err)
     }
 
     // Reinit DB
     if err := api.db.Reload(); err != nil {
-        appErrorPage(c, http.StatusInternalServerError, "Unable to reload DB")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to reload DB.")
         log.Panicf("Unable to reload DB: %v", err)
     }
 
     // Rotate Auth Hashes
     if err := api.rotateAllAuthHashes(); err != nil {
-        appErrorPage(c, http.StatusInternalServerError, "Unable to rotate hashes")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to rotate hashes.")
         log.Panicf("Unable to rotate auth hashes: %v", err)
     }
 
@@ -468,7 +433,6 @@ func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Conte
     c.Redirect(http.StatusFound, "/login")
 }
 
-// Restore all data
 func (api *API) restoreData(zipReader *zip.Reader) error {
     // Ensure Directories
     api.cfg.EnsureDirectories()
@@ -499,7 +463,6 @@ func (api *API) restoreData(zipReader *zip.Reader) error {
     return nil
 }
 
-// Remove all data
 func (api *API) removeData() error {
     allPaths := []string{
         "covers",
@@ -522,7 +485,6 @@ func (api *API) removeData() error {
     return nil
 }
 
-// Backup all data
 func (api *API) createBackup(w io.Writer, directories []string) error {
     ar := zip.NewWriter(w)
 
@@ -157,7 +157,7 @@ func (api *API) appGetDocument(c *gin.Context) {
     var rDocID requestDocumentID
     if err := c.ShouldBindUri(&rDocID); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid document")
+        appErrorPage(c, http.StatusNotFound, "Invalid document.")
         return
     }
 
@@ -361,7 +361,7 @@ func (api *API) appGetDocumentProgress(c *gin.Context) {
     var rDoc requestDocumentID
     if err := c.ShouldBindUri(&rDoc); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid document")
+        appErrorPage(c, http.StatusNotFound, "Invalid document.")
         return
     }
 
@@ -417,7 +417,7 @@ func (api *API) appUploadNewDocument(c *gin.Context) {
     var rDocUpload requestDocumentUpload
     if err := c.ShouldBind(&rDocUpload); err != nil {
         log.Error("Invalid Form Bind")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -426,92 +426,153 @@ func (api *API) appUploadNewDocument(c *gin.Context) {
         return
     }
 
+    // Validate Type & Derive Extension on MIME
+    uploadedFile, err := rDocUpload.DocumentFile.Open()
+    if err != nil {
+        log.Error("File Error: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to open file.")
+        return
+    }
+
+    fileMime, err := mimetype.DetectReader(uploadedFile)
+    if err != nil {
+        log.Error("MIME Error")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype.")
+        return
+    }
+    fileExtension := fileMime.Extension()
+
+    // Validate Extension
+    if !slices.Contains([]string{".epub"}, fileExtension) {
+        log.Error("Invalid FileType: ", fileExtension)
+        appErrorPage(c, http.StatusBadRequest, "Invalid filetype.")
+        return
+    }
+
     // Create Temp File
     tempFile, err := os.CreateTemp("", "book")
     if err != nil {
         log.Warn("Temp File Create Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file.")
         return
     }
     defer os.Remove(tempFile.Name())
     defer tempFile.Close()
 
-    // Save Temp File
+    // Save Temp
     err = c.SaveUploadedFile(rDocUpload.DocumentFile, tempFile.Name())
     if err != nil {
         log.Error("File Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to save file.")
         return
     }
 
     // Get Metadata
     metadataInfo, err := metadata.GetMetadata(tempFile.Name())
     if err != nil {
-        log.Errorf("unable to acquire metadata: %v", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to acquire metadata")
+        log.Warn("GetMetadata Error: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to acquire file metadata.")
         return
     }
 
-    // Check Already Exists
-    _, err = api.db.Queries.GetDocument(api.db.Ctx, *metadataInfo.PartialMD5)
-    if err == nil {
-        log.Warnf("document already exists: %s", *metadataInfo.PartialMD5)
-        c.Redirect(http.StatusFound, fmt.Sprintf("./documents/%s", *metadataInfo.PartialMD5))
+    // Calculate Partial MD5 ID
+    partialMD5, err := utils.CalculatePartialMD5(tempFile.Name())
+    if err != nil {
+        log.Warn("Partial MD5 Error: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to calculate partial MD5.")
+        return
     }
 
-    // Derive & Sanitize File Name
-    fileName := deriveBaseFileName(metadataInfo)
-    safePath := filepath.Join(api.cfg.DataPath, "documents", fileName)
+    // Check Exists
+    _, err = api.db.Queries.GetDocument(api.db.Ctx, partialMD5)
+    if err == nil {
+        c.Redirect(http.StatusFound, fmt.Sprintf("./documents/%s", partialMD5))
+        return
+    }
 
-    // Open Destination File
+    // Calculate Actual MD5
+    fileHash, err := getFileMD5(tempFile.Name())
+    if err != nil {
+        log.Error("MD5 Hash Failure: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to calculate MD5.")
+        return
+    }
+
+    // Get Word Count
+    wordCount, err := metadata.GetWordCount(tempFile.Name())
+    if err != nil {
+        log.Error("Word Count Failure: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to calculate word count.")
+        return
+    }
+
+    // Derive Filename
+    var fileName string
+    if *metadataInfo.Author != "" {
+        fileName = fileName + *metadataInfo.Author
+    } else {
+        fileName = fileName + "Unknown"
+    }
+
+    if *metadataInfo.Title != "" {
+        fileName = fileName + " - " + *metadataInfo.Title
+    } else {
+        fileName = fileName + " - Unknown"
+    }
+
+    // Remove Slashes
+    fileName = strings.ReplaceAll(fileName, "/", "")
+
+    // Derive & Sanitize File Name
+    fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, partialMD5, fileExtension))
+
+    // Generate Storage Path & Open File
+    safePath := filepath.Join(api.cfg.DataPath, "documents", fileName)
     destFile, err := os.Create(safePath)
     if err != nil {
-        log.Errorf("unable to open destination file: %v", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to open destination file")
+        log.Error("Dest File Error: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to save file.")
         return
     }
     defer destFile.Close()
 
     // Copy File
     if _, err = io.Copy(destFile, tempFile); err != nil {
-        log.Errorf("unable to save file: %v", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
+        log.Error("Copy Temp File Error: ", err)
+        appErrorPage(c, http.StatusInternalServerError, "Unable to save file.")
        return
     }
 
     // Upsert Document
     if _, err = api.db.Queries.UpsertDocument(api.db.Ctx, database.UpsertDocumentParams{
-        ID:          *metadataInfo.PartialMD5,
+        ID:          partialMD5,
         Title:       metadataInfo.Title,
         Author:      metadataInfo.Author,
         Description: metadataInfo.Description,
-        Md5:         metadataInfo.MD5,
-        Words:       metadataInfo.WordCount,
+        Words:       &wordCount,
+        Md5:         fileHash,
         Filepath:    &fileName,
-
-        // TODO (BasePath):
-        //   - Should be current config directory
     }); err != nil {
-        log.Errorf("UpsertDocument DB Error: %v", err)
+        log.Error("UpsertDocument DB Error: ", err)
         appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("UpsertDocument DB Error: %v", err))
         return
     }
 
-    c.Redirect(http.StatusFound, fmt.Sprintf("./documents/%s", *metadataInfo.PartialMD5))
+    c.Redirect(http.StatusFound, fmt.Sprintf("./documents/%s", partialMD5))
 }
 
 func (api *API) appEditDocument(c *gin.Context) {
     var rDocID requestDocumentID
     if err := c.ShouldBindUri(&rDocID); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid document")
+        appErrorPage(c, http.StatusNotFound, "Invalid document.")
         return
     }
 
     var rDocEdit requestDocumentEdit
     if err := c.ShouldBind(&rDocEdit); err != nil {
         log.Error("Invalid Form Bind")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
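Aside: the rewritten upload handler derives the document ID from utils.CalculatePartialMD5 instead of the parsed metadata. That helper's body is not part of this compare; as a hedged sketch only, a KOReader-style partial MD5 hashes small fixed-size samples at widely spaced offsets rather than the whole file. The sampling schedule, helper name, and return type below are assumptions.

package utils

import (
    "crypto/md5"
    "encoding/hex"
    "io"
    "os"
)

// calculatePartialMD5Sketch is illustrative only; the repository's actual
// utils.CalculatePartialMD5 may use a different sampling scheme.
func calculatePartialMD5Sketch(filePath string) (string, error) {
    file, err := os.Open(filePath)
    if err != nil {
        return "", err
    }
    defer file.Close()

    hash := md5.New()
    buf := make([]byte, 1024)

    // Hash 1 KiB samples at offsets 0, 1 KiB, 4 KiB, 16 KiB, ... (assumed schedule).
    var offset int64
    for i := 0; i <= 10; i++ {
        n, err := file.ReadAt(buf, offset)
        if n > 0 {
            hash.Write(buf[:n])
        }
        if err == io.EOF {
            break
        }
        if err != nil {
            return "", err
        }
        offset = int64(1024) << uint(2*i)
    }
    return hex.EncodeToString(hash.Sum(nil)), nil
}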
@@ -525,7 +586,7 @@ func (api *API) appEditDocument(c *gin.Context) {
         rDocEdit.CoverGBID == nil &&
         rDocEdit.CoverFile == nil {
         log.Error("Missing Form Values")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -539,14 +600,14 @@ func (api *API) appEditDocument(c *gin.Context) {
     uploadedFile, err := rDocEdit.CoverFile.Open()
     if err != nil {
         log.Error("File Error")
-        appErrorPage(c, http.StatusInternalServerError, "Unable to open file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to open file.")
         return
     }
 
     fileMime, err := mimetype.DetectReader(uploadedFile)
     if err != nil {
         log.Error("MIME Error")
-        appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype.")
         return
     }
     fileExtension := fileMime.Extension()
@@ -554,7 +615,7 @@ func (api *API) appEditDocument(c *gin.Context) {
     // Validate Extension
     if !slices.Contains([]string{".jpg", ".png"}, fileExtension) {
         log.Error("Invalid FileType: ", fileExtension)
-        appErrorPage(c, http.StatusBadRequest, "Invalid filetype")
+        appErrorPage(c, http.StatusBadRequest, "Invalid filetype.")
         return
     }
 
@@ -566,7 +627,7 @@ func (api *API) appEditDocument(c *gin.Context) {
     err = c.SaveUploadedFile(rDocEdit.CoverFile, safePath)
     if err != nil {
         log.Error("File Error: ", err)
-        appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
+        appErrorPage(c, http.StatusInternalServerError, "Unable to save file.")
         return
     }
 
@@ -602,7 +663,7 @@ func (api *API) appDeleteDocument(c *gin.Context) {
     var rDocID requestDocumentID
     if err := c.ShouldBindUri(&rDocID); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid document")
+        appErrorPage(c, http.StatusNotFound, "Invalid document.")
         return
     }
     changed, err := api.db.Queries.DeleteDocument(api.db.Ctx, rDocID.DocumentID)
@@ -613,7 +674,7 @@ func (api *API) appDeleteDocument(c *gin.Context) {
     }
     if changed == 0 {
         log.Error("DeleteDocument DB Error")
-        appErrorPage(c, http.StatusNotFound, "Invalid document")
+        appErrorPage(c, http.StatusNotFound, "Invalid document.")
         return
     }
 
@@ -624,14 +685,14 @@ func (api *API) appIdentifyDocument(c *gin.Context) {
     var rDocID requestDocumentID
     if err := c.ShouldBindUri(&rDocID); err != nil {
         log.Error("Invalid URI Bind")
-        appErrorPage(c, http.StatusNotFound, "Invalid document")
+        appErrorPage(c, http.StatusNotFound, "Invalid document.")
         return
     }
 
     var rDocIdentify requestDocumentIdentify
     if err := c.ShouldBind(&rDocIdentify); err != nil {
         log.Error("Invalid Form Bind")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -649,7 +710,7 @@ func (api *API) appIdentifyDocument(c *gin.Context) {
     // Validate Values
     if rDocIdentify.ISBN == nil && rDocIdentify.Title == nil && rDocIdentify.Author == nil {
         log.Error("Invalid Form")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -657,7 +718,7 @@ func (api *API) appIdentifyDocument(c *gin.Context) {
     templateVars, auth := api.getBaseTemplateVars("document", c)
 
     // Get Metadata
-    metadataResults, err := metadata.SearchMetadata(metadata.SOURCE_GBOOK, metadata.MetadataInfo{
+    metadataResults, err := metadata.SearchMetadata(metadata.GBOOK, metadata.MetadataInfo{
         Title:  rDocIdentify.Title,
         Author: rDocIdentify.Author,
         ISBN10: rDocIdentify.ISBN,
@@ -706,7 +767,7 @@ func (api *API) appSaveNewDocument(c *gin.Context) {
     var rDocAdd requestDocumentAdd
     if err := c.ShouldBind(&rDocAdd); err != nil {
         log.Error("Invalid Form Bind")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -784,7 +845,7 @@ func (api *API) appSaveNewDocument(c *gin.Context) {
     fileName = strings.ReplaceAll(fileName, "/", "")
 
     // Derive & Sanitize File Name
-    fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, *partialMD5, fileExtension))
+    fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, partialMD5, fileExtension))
 
     // Open Source File
     sourceFile, err := os.Open(tempFilePath)
@@ -840,12 +901,12 @@ func (api *API) appSaveNewDocument(c *gin.Context) {
 
     // Upsert Document
     if _, err = api.db.Queries.UpsertDocument(api.db.Ctx, database.UpsertDocumentParams{
-        ID:       *partialMD5,
+        ID:       partialMD5,
         Title:    rDocAdd.Title,
         Author:   rDocAdd.Author,
         Md5:      fileHash,
         Filepath: &fileName,
-        Words:    wordCount,
+        Words:    &wordCount,
     }); err != nil {
         log.Error("UpsertDocument DB Error: ", err)
         sendDownloadMessage("Unable to save to database", gin.H{"Error": true})
@@ -856,7 +917,7 @@ func (api *API) appSaveNewDocument(c *gin.Context) {
     sendDownloadMessage("Download Success", gin.H{
         "Progress":   100,
         "ButtonText": "Go to Book",
-        "ButtonHref": fmt.Sprintf("./documents/%s", *partialMD5),
+        "ButtonHref": fmt.Sprintf("./documents/%s", partialMD5),
     })
 }
 
@@ -864,14 +925,14 @@ func (api *API) appEditSettings(c *gin.Context) {
     var rUserSettings requestSettingsEdit
     if err := c.ShouldBind(&rUserSettings); err != nil {
         log.Error("Invalid Form Bind")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
     // Validate Something Exists
     if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.TimeOffset == nil {
         log.Error("Missing Form Values")
-        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
+        appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values.")
         return
     }
 
@@ -962,7 +1023,7 @@ func (api *API) getDocumentsWordCount(documents []database.GetDocumentsWithStats
     } else {
         if _, err := qtx.UpsertDocument(api.db.Ctx, database.UpsertDocumentParams{
             ID:    item.ID,
-            Words: wordCount,
+            Words: &wordCount,
         }); err != nil {
             log.Error("UpsertDocument DB Error: ", err)
             return err
@@ -95,7 +95,7 @@ func (api *API) createGetCoverHandler(errorFunc func(*gin.Context, int, string))
     var coverFile string = "UNKNOWN"
 
     // Identify Documents & Save Covers
-    metadataResults, err := metadata.SearchMetadata(metadata.SOURCE_GBOOK, metadata.MetadataInfo{
+    metadataResults, err := metadata.SearchMetadata(metadata.GBOOK, metadata.MetadataInfo{
         Title:  document.Title,
         Author: document.Author,
     })
@@ -10,10 +10,13 @@ import (
     "net/http"
     "os"
     "path/filepath"
+    "strings"
     "time"
 
+    "github.com/gabriel-vasile/mimetype"
     "github.com/gin-gonic/gin"
     log "github.com/sirupsen/logrus"
+    "golang.org/x/exp/slices"
     "reichard.io/antholume/database"
     "reichard.io/antholume/metadata"
 )
@@ -453,11 +456,21 @@ func (api *API) koUploadExistingDocument(c *gin.Context) {
         return
     }
 
-    // Open Form File
     fileData, err := c.FormFile("file")
     if err != nil {
         log.Error("File Error:", err)
-        apiErrorPage(c, http.StatusBadRequest, "File error")
+        apiErrorPage(c, http.StatusBadRequest, "File Error")
+        return
+    }
+
+    // Validate Type & Derive Extension on MIME
+    uploadedFile, err := fileData.Open()
+    fileMime, err := mimetype.DetectReader(uploadedFile)
+    fileExtension := fileMime.Extension()
+
+    if !slices.Contains([]string{".epub", ".html"}, fileExtension) {
+        log.Error("Invalid FileType:", fileExtension)
+        apiErrorPage(c, http.StatusBadRequest, "Invalid Filetype")
         return
     }
 
@@ -469,29 +482,25 @@ func (api *API) koUploadExistingDocument(c *gin.Context) {
         return
     }
 
-    // Open File
-    uploadedFile, err := fileData.Open()
-    if err != nil {
-        log.Error("Unable to open file")
-        apiErrorPage(c, http.StatusBadRequest, "Unable to open file")
-        return
-    }
-
-    // Check Support
-    docType, err := metadata.GetDocumentTypeReader(uploadedFile)
-    if err != nil {
-        log.Error("Unsupported file")
-        apiErrorPage(c, http.StatusBadRequest, "Unsupported file")
-        return
-    }
-
     // Derive Filename
-    fileName := deriveBaseFileName(&metadata.MetadataInfo{
-        Type:       *docType,
-        PartialMD5: &document.ID,
-        Title:      document.Title,
-        Author:     document.Author,
-    })
+    var fileName string
+    if document.Author != nil {
+        fileName = fileName + *document.Author
+    } else {
+        fileName = fileName + "Unknown"
+    }
+
+    if document.Title != nil {
+        fileName = fileName + " - " + *document.Title
+    } else {
+        fileName = fileName + " - Unknown"
+    }
+
+    // Remove Slashes
+    fileName = strings.ReplaceAll(fileName, "/", "")
+
+    // Derive & Sanitize File Name
+    fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
 
     // Generate Storage Path
     safePath := filepath.Join(api.cfg.DataPath, "documents", fileName)
@@ -507,20 +516,28 @@ func (api *API) koUploadExistingDocument(c *gin.Context) {
         }
     }
 
-    // Acquire Metadata
-    metadataInfo, err := metadata.GetMetadata(safePath)
+    // Get MD5 Hash
+    fileHash, err := getFileMD5(safePath)
     if err != nil {
-        log.Errorf("Unable to acquire metadata: %v", err)
-        apiErrorPage(c, http.StatusBadRequest, "Unable to acquire metadata")
+        log.Error("Hash Failure:", err)
+        apiErrorPage(c, http.StatusBadRequest, "File Error")
+        return
+    }
+
+    // Get Word Count
+    wordCount, err := metadata.GetWordCount(safePath)
+    if err != nil {
+        log.Error("Word Count Failure:", err)
+        apiErrorPage(c, http.StatusBadRequest, "File Error")
         return
     }
 
     // Upsert Document
     if _, err = api.db.Queries.UpsertDocument(api.db.Ctx, database.UpsertDocumentParams{
         ID:       document.ID,
-        Md5:      metadataInfo.MD5,
-        Words:    metadataInfo.WordCount,
+        Md5:      fileHash,
         Filepath: &fileName,
+        Words:    &wordCount,
     }); err != nil {
         log.Error("UpsertDocument DB Error:", err)
         apiErrorPage(c, http.StatusBadRequest, "Document Error")
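Aside: getFileMD5 is called here and in the upload handler, but its definition is not included in this compare. A minimal full-file MD5 helper, written under the assumption that it returns a *string to match the Md5 upsert field, would look roughly like this.

package api

import (
    "crypto/md5"
    "encoding/hex"
    "io"
    "os"
)

// getFileMD5Sketch is a hedged stand-in for the repository's getFileMD5 helper.
func getFileMD5Sketch(filePath string) (*string, error) {
    file, err := os.Open(filePath)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    hash := md5.New()
    if _, err := io.Copy(hash, file); err != nil {
        return nil, err
    }

    sum := hex.EncodeToString(hash.Sum(nil))
    return &sum, nil
}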
api/utils.go (22 lines changed)
@@ -4,13 +4,10 @@ import (
     "errors"
     "fmt"
     "math"
-    "path/filepath"
     "reflect"
-    "strings"
 
     "reichard.io/antholume/database"
     "reichard.io/antholume/graph"
-    "reichard.io/antholume/metadata"
 )
 
 type UTCOffset struct {
@@ -147,22 +144,3 @@ func fields(value interface{}) (map[string]interface{}, error) {
     }
     return m, nil
 }
-
-func deriveBaseFileName(metadataInfo *metadata.MetadataInfo) string {
-    // Derive New FileName
-    var newFileName string
-    if *metadataInfo.Author != "" {
-        newFileName = newFileName + *metadataInfo.Author
-    } else {
-        newFileName = newFileName + "Unknown"
-    }
-    if *metadataInfo.Title != "" {
-        newFileName = newFileName + " - " + *metadataInfo.Title
-    } else {
-        newFileName = newFileName + " - Unknown"
-    }
-
-    // Remove Slashes
-    fileName := strings.ReplaceAll(newFileName, "/", "")
-    return "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, *metadataInfo.PartialMD5, metadataInfo.Type))
-}
@@ -1,35 +1,12 @@
 package api
 
-import (
-    "testing"
-
-    "github.com/stretchr/testify/assert"
-)
+import "testing"
 
 func TestNiceSeconds(t *testing.T) {
-    wantOne := "22d 7h 39m 31s"
-    wantNA := "N/A"
+    want := "22d 7h 39m 31s"
+    nice := niceSeconds(1928371)
 
-    niceOne := niceSeconds(1928371)
-    niceNA := niceSeconds(0)
-
-    assert.Equal(t, wantOne, niceOne, "should be nice seconds")
-    assert.Equal(t, wantNA, niceNA, "should be nice NA")
-}
-
-func TestNiceNumbers(t *testing.T) {
-    wantMillions := "198M"
-    wantThousands := "19.8k"
-    wantThousandsTwo := "1.98k"
-    wantZero := "0"
-
-    niceMillions := niceNumbers(198236461)
-    niceThousands := niceNumbers(19823)
-    niceThousandsTwo := niceNumbers(1984)
-    niceZero := niceNumbers(0)
-
-    assert.Equal(t, wantMillions, niceMillions, "should be nice millions")
-    assert.Equal(t, wantThousands, niceThousands, "should be nice thousands")
-    assert.Equal(t, wantThousandsTwo, niceThousandsTwo, "should be nice thousands")
-    assert.Equal(t, wantZero, niceZero, "should be nice zero")
+    if nice != want {
+        t.Fatalf(`Expected: %v, Got: %v`, want, nice)
+    }
 }
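Aside: the rewritten test drops testify and the niceSeconds(0) ("N/A") case. A table-driven variant in the same plain-testing style could keep both expectations from the original test; the test name is hypothetical and the parameter is assumed to be a plain int.

func TestNiceSecondsTable(t *testing.T) {
    // Expected values taken from the original testify-based test.
    cases := []struct {
        in   int
        want string
    }{
        {1928371, "22d 7h 39m 31s"},
        {0, "N/A"},
    }
    for _, tc := range cases {
        if got := niceSeconds(tc.in); got != tc.want {
            t.Fatalf("niceSeconds(%d) = %q, want %q", tc.in, got, tc.want)
        }
    }
}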
assets/lib/epub.min.js (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
-<!doctype html>
+<!DOCTYPE html>
 <html lang="en">
 <head>
     <meta charset="utf-8" />
@@ -97,18 +97,16 @@ class EBookReader {
       flow: "paginated",
       width: "100%",
      height: "100%",
-      allowScriptedContent: true,
     });
 
     // Setup Reader
     this.book.ready.then(this.setupReader.bind(this));
 
     // Initialize
-    this.initCSP();
     this.initDevice();
     this.initWakeLock();
     this.initThemes();
-    this.initViewerListeners();
+    this.initRenditionListeners();
     this.initDocumentListeners();
   }
 
@@ -281,36 +279,6 @@ class EBookReader {
     );
   }
 
-  /**
-   * EpubJS will set iframe sandbox when settings "allowScriptedContent: false".
-   * However, Safari completely blocks us from attaching listeners to the iframe
-   * document. So instead we just inject a restrictive CSP rule.
-   *
-   * This effectively blocks all script content within the iframe while still
-   * allowing us to attach listeners to the iframe document.
-   **/
-  initCSP() {
-    // Derive CSP Host
-    var protocol = document.location.protocol;
-    var host = document.location.host;
-    var cspURL = `${protocol}//${host}`;
-
-    // Add CSP Policy
-    this.book.spine.hooks.content.register((output, section) => {
-      let cspWrapper = document.createElement("div");
-      cspWrapper.innerHTML = `
-        <meta
-          http-equiv="Content-Security-Policy"
-          content="require-trusted-types-for 'script';
-                   style-src 'self' blob: 'unsafe-inline' ${cspURL};
-                   object-src 'none';
-                   script-src 'none';"
-        >`;
-      let cspMeta = cspWrapper.children[0];
-      output.head.append(cspMeta);
-    });
-  }
-
   /**
    * Set theme & meta theme color
    **/
@@ -403,9 +371,9 @@ class EBookReader {
   }
 
   /**
-   * Viewer Listeners
+   * Rendition hooks
   **/
-  initViewerListeners() {
+  initRenditionListeners() {
    /**
    * Initiate the debounce when the given function returns true.
    * Don't run it again until the timeout lapses.
@@ -433,52 +401,15 @@ class EBookReader {
     let bottomBar = document.querySelector("#bottom-bar");
 
     // Local Functions
+    let getCFIFromXPath = this.getCFIFromXPath.bind(this);
+    let setPosition = this.setPosition.bind(this);
     let nextPage = this.nextPage.bind(this);
     let prevPage = this.prevPage.bind(this);
+    let saveSettings = this.saveSettings.bind(this);
 
-    // ------------------------------------------------ //
-    // ----------------- Swipe Helpers ---------------- //
-    // ------------------------------------------------ //
-    let touchStartX,
-      touchStartY,
-      touchEndX,
-      touchEndY = undefined;
-
-    function handleGesture(event) {
-      let drasticity = 75;
-
-      // Swipe Down
-      if (touchEndY - drasticity > touchStartY) {
-        return handleSwipeDown();
-      }
-
-      // Swipe Up
-      if (touchEndY + drasticity < touchStartY) {
-        // Prioritize Down & Up Swipes
-        return handleSwipeUp();
-      }
-
-      // Swipe Left
-      if (touchEndX + drasticity < touchStartX) {
-        nextPage();
-      }
-
-      // Swipe Right
-      if (touchEndX - drasticity > touchStartX) {
-        prevPage();
-      }
-    }
-
-    function handleSwipeDown() {
-      if (bottomBar.classList.contains("bottom-0"))
-        bottomBar.classList.remove("bottom-0");
-      else topBar.classList.add("top-0");
-    }
-
-    function handleSwipeUp() {
-      if (topBar.classList.contains("top-0")) topBar.classList.remove("top-0");
-      else bottomBar.classList.add("bottom-0");
-    }
+    // Local Vars
+    let readerSettings = this.readerSettings;
+    let bookState = this.bookState;
 
     this.rendition.hooks.render.register(function (doc, data) {
       let renderDoc = doc.document;
@@ -487,14 +418,66 @@ class EBookReader {
      // ---------------- Wake Lock Hack ---------------- //
      // ------------------------------------------------ //
      let wakeLockListener = function () {
-        renderDoc.dispatchEvent(new CustomEvent("wakelock"));
+        doc.window.parent.document.dispatchEvent(new CustomEvent("wakelock"));
      };
      renderDoc.addEventListener("click", wakeLockListener);
      renderDoc.addEventListener("gesturechange", wakeLockListener);
      renderDoc.addEventListener("touchstart", wakeLockListener);
 
      // ------------------------------------------------ //
-      // --------------- Bars & Page Turn --------------- //
+      // --------------- Swipe Pagination --------------- //
+      // ------------------------------------------------ //
+      let touchStartX,
+        touchStartY,
+        touchEndX,
+        touchEndY = undefined;
+
+      renderDoc.addEventListener(
+        "touchstart",
+        function (event) {
+          touchStartX = event.changedTouches[0].screenX;
+          touchStartY = event.changedTouches[0].screenY;
+        },
+        false,
+      );
+
+      renderDoc.addEventListener(
+        "touchend",
+        function (event) {
+          touchEndX = event.changedTouches[0].screenX;
+          touchEndY = event.changedTouches[0].screenY;
+          handleGesture(event);
+        },
+        false,
+      );
+
+      function handleGesture(event) {
+        let drasticity = 75;
+
+        // Swipe Down
+        if (touchEndY - drasticity > touchStartY) {
+          return handleSwipeDown();
+        }
+
+        // Swipe Up
+        if (touchEndY + drasticity < touchStartY) {
+          // Prioritize Down & Up Swipes
+          return handleSwipeUp();
+        }
+
+        // Swipe Left
+        if (touchEndX + drasticity < touchStartX) {
+          nextPage();
+        }
+
+        // Swipe Right
+        if (touchEndX - drasticity > touchStartX) {
+          prevPage();
+        }
+      }
+
+      // ------------------------------------------------ //
+      // --------------- Bottom & Top Bar --------------- //
      // ------------------------------------------------ //
      renderDoc.addEventListener(
        "click",
@@ -546,25 +529,45 @@ class EBookReader {
        }, 400),
      );
 
-      // ------------------------------------------------ //
-      // ------------------- Gestures ------------------- //
-      // ------------------------------------------------ //
-      renderDoc.addEventListener(
-        "touchstart",
-        function (event) {
-          touchStartX = event.changedTouches[0].screenX;
-          touchStartY = event.changedTouches[0].screenY;
-        },
-        false,
-      );
+      function handleSwipeDown() {
+        if (bottomBar.classList.contains("bottom-0"))
+          bottomBar.classList.remove("bottom-0");
+        else topBar.classList.add("top-0");
+      }
 
+      function handleSwipeUp() {
+        if (topBar.classList.contains("top-0"))
+          topBar.classList.remove("top-0");
+        else bottomBar.classList.add("bottom-0");
+      }
+
+      // ------------------------------------------------ //
+      // -------------- Keyboard Shortcuts -------------- //
+      // ------------------------------------------------ //
      renderDoc.addEventListener(
-        "touchend",
-        function (event) {
-          touchEndX = event.changedTouches[0].screenX;
-          touchEndY = event.changedTouches[0].screenY;
-          handleGesture(event);
+        "keyup",
+        function (e) {
+          // Left Key (Previous Page)
+          if ((e.keyCode || e.which) == 37) {
+            prevPage();
+          }
+
+          // Right Key (Next Page)
+          if ((e.keyCode || e.which) == 39) {
+            nextPage();
+          }
+
+          // "t" Key (Theme Cycle)
+          if ((e.keyCode || e.which) == 84) {
+            let currentThemeIdx = THEMES.indexOf(
+              readerSettings.theme.colorScheme,
+            );
+            let colorScheme =
+              THEMES.length == currentThemeIdx + 1
+                ? THEMES[0]
+                : THEMES[currentThemeIdx + 1];
+            setTheme({ colorScheme });
+          }
        },
        false,
      );
@@ -581,9 +584,7 @@ class EBookReader {
     let nextPage = this.nextPage.bind(this);
     let prevPage = this.prevPage.bind(this);
 
-    // ------------------------------------------------ //
-    // -------------- Keyboard Shortcuts -------------- //
-    // ------------------------------------------------ //
+    // Keyboard Shortcuts
     document.addEventListener(
       "keyup",
       function (e) {
|
@ -118,7 +118,6 @@ func (c *Config) EnsureDirectories() {
|
|||||||
docDir := filepath.Join(c.DataPath, "documents")
|
docDir := filepath.Join(c.DataPath, "documents")
|
||||||
coversDir := filepath.Join(c.DataPath, "covers")
|
coversDir := filepath.Join(c.DataPath, "covers")
|
||||||
backupDir := filepath.Join(c.DataPath, "backups")
|
backupDir := filepath.Join(c.DataPath, "backups")
|
||||||
|
|
||||||
os.Mkdir(docDir, 0755)
|
os.Mkdir(docDir, 0755)
|
||||||
os.Mkdir(coversDir, 0755)
|
os.Mkdir(coversDir, 0755)
|
||||||
os.Mkdir(backupDir, 0755)
|
os.Mkdir(backupDir, 0755)
|
||||||
|
@ -1,37 +1,35 @@
|
|||||||
package config
|
package config
|
||||||
|
|
||||||
import (
|
import "testing"
|
||||||
"runtime"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestLoadConfig(t *testing.T) {
|
func TestLoadConfig(t *testing.T) {
|
||||||
conf := Load()
|
conf := Load()
|
||||||
assert.Equal(t, "sqlite", conf.DBType)
|
want := "sqlite"
|
||||||
|
if conf.DBType != want {
|
||||||
|
t.Fatalf(`Load().DBType = %q, want match for %#q, nil`, conf.DBType, want)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetEnvDefault(t *testing.T) {
|
func TestGetEnvDefault(t *testing.T) {
|
||||||
desiredValue := "def_val"
|
want := "def_val"
|
||||||
envDefault := getEnv("DEFAULT_TEST", desiredValue)
|
envDefault := getEnv("DEFAULT_TEST", want)
|
||||||
|
if envDefault != want {
|
||||||
|
t.Fatalf(`getEnv("DEFAULT_TEST", "def_val") = %q, want match for %#q, nil`, envDefault, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
assert.Equal(t, desiredValue, envDefault)
|
func TestGetEnvSet(t *testing.T) {
|
||||||
|
envDefault := getEnv("SET_TEST", "not_this")
|
||||||
|
want := "set_val"
|
||||||
|
if envDefault != want {
|
||||||
|
t.Fatalf(`getEnv("SET_TEST", "not_this") = %q, want match for %#q, nil`, envDefault, want)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTrimLowerString(t *testing.T) {
|
func TestTrimLowerString(t *testing.T) {
|
||||||
desiredValue := "trimtest"
|
want := "trimtest"
|
||||||
outputValue := trimLowerString(" trimTest ")
|
output := trimLowerString(" trimTest ")
|
||||||
|
if output != want {
|
||||||
assert.Equal(t, desiredValue, outputValue)
|
t.Fatalf(`trimLowerString(" trimTest ") = %q, want match for %#q, nil`, output, want)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestPrettyCaller(t *testing.T) {
|
|
||||||
p, _, _, _ := runtime.Caller(0)
|
|
||||||
result := runtime.CallersFrames([]uintptr{p})
|
|
||||||
f, _ := result.Next()
|
|
||||||
functionName, fileName := prettyCaller(&f)
|
|
||||||
|
|
||||||
assert.Equal(t, "TestPrettyCaller", functionName, "should have current function name")
|
|
||||||
assert.Equal(t, "config/config_test.go@30", fileName, "should have current file path and line number")
|
|
||||||
}
|
}
|
||||||
|
@ -5,7 +5,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"reichard.io/antholume/config"
|
"reichard.io/antholume/config"
|
||||||
"reichard.io/antholume/utils"
|
"reichard.io/antholume/utils"
|
||||||
)
|
)
|
||||||
@ -29,7 +28,9 @@ func TestNewMgr(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dbm := NewMgr(&cfg)
|
dbm := NewMgr(&cfg)
|
||||||
assert.NotNil(t, dbm, "should not be nil dbm")
|
if dbm == nil {
|
||||||
|
t.Fatalf(`Expected: *DBManager, Got: nil`)
|
||||||
|
}
|
||||||
|
|
||||||
t.Run("Database", func(t *testing.T) {
|
t.Run("Database", func(t *testing.T) {
|
||||||
dt := databaseTest{t, dbm}
|
dt := databaseTest{t, dbm}
|
||||||
@ -45,7 +46,9 @@ func (dt *databaseTest) TestUser() {
|
|||||||
dt.Run("User", func(t *testing.T) {
|
dt.Run("User", func(t *testing.T) {
|
||||||
// Generate Auth Hash
|
// Generate Auth Hash
|
||||||
rawAuthHash, err := utils.GenerateToken(64)
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
assert.Nil(t, err, "should be nil err")
|
if err != nil {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, nil, err, err)
|
||||||
|
}
|
||||||
|
|
||||||
authHash := fmt.Sprintf("%x", rawAuthHash)
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
changed, err := dt.dbm.Queries.CreateUser(dt.dbm.Ctx, CreateUserParams{
|
changed, err := dt.dbm.Queries.CreateUser(dt.dbm.Ctx, CreateUserParams{
|
||||||
@ -54,13 +57,14 @@ func (dt *databaseTest) TestUser() {
|
|||||||
AuthHash: &authHash,
|
AuthHash: &authHash,
|
||||||
})
|
})
|
||||||
|
|
||||||
assert.Nil(t, err, "should be nil err")
|
if err != nil || changed != 1 {
|
||||||
assert.Equal(t, int64(1), changed)
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, changed, err)
|
||||||
|
}
|
||||||
|
|
||||||
user, err := dt.dbm.Queries.GetUser(dt.dbm.Ctx, userID)
|
user, err := dt.dbm.Queries.GetUser(dt.dbm.Ctx, userID)
|
||||||
|
if err != nil || *user.Pass != userPass {
|
||||||
assert.Nil(t, err, "should be nil err")
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, userPass, *user.Pass, err)
|
||||||
assert.Equal(t, userPass, *user.Pass)
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
4
go.mod
4
go.mod
@ -27,7 +27,6 @@ require (
|
|||||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
|
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
|
||||||
github.com/chenzhuoyu/iasm v0.9.1 // indirect
|
github.com/chenzhuoyu/iasm v0.9.1 // indirect
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
|
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||||
github.com/go-playground/locales v0.14.1 // indirect
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
@ -40,7 +39,6 @@ require (
|
|||||||
github.com/gorilla/securecookie v1.1.2 // indirect
|
github.com/gorilla/securecookie v1.1.2 // indirect
|
||||||
github.com/gorilla/sessions v1.2.2 // indirect
|
github.com/gorilla/sessions v1.2.2 // indirect
|
||||||
github.com/itchyny/timefmt-go v0.1.5 // indirect
|
github.com/itchyny/timefmt-go v0.1.5 // indirect
|
||||||
github.com/jarcoal/httpmock v1.3.1 // indirect
|
|
||||||
github.com/json-iterator/go v1.1.12 // indirect
|
github.com/json-iterator/go v1.1.12 // indirect
|
||||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
|
||||||
github.com/klauspost/cpuid/v2 v2.2.6 // indirect
|
github.com/klauspost/cpuid/v2 v2.2.6 // indirect
|
||||||
@ -49,11 +47,9 @@ require (
|
|||||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||||
github.com/pelletier/go-toml/v2 v2.1.1 // indirect
|
github.com/pelletier/go-toml/v2 v2.1.1 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||||
github.com/sethvargo/go-retry v0.2.4 // indirect
|
github.com/sethvargo/go-retry v0.2.4 // indirect
|
||||||
github.com/stretchr/testify v1.8.4 // indirect
|
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||||
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect
|
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect
|
||||||
|
2
go.sum
2
go.sum
@ -127,8 +127,6 @@ github.com/jackc/pgx/v5 v5.5.1 h1:5I9etrGkLrN+2XPCsi6XLlV5DITbSL/xBZdmAxFcXPI=
|
|||||||
github.com/jackc/pgx/v5 v5.5.1/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
|
github.com/jackc/pgx/v5 v5.5.1/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
|
||||||
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
|
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
|
||||||
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww=
|
|
||||||
github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
|
|
||||||
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 h1:rp+c0RAYOWj8l6qbCUTSiRLG/iKnW3K3/QfPPuSsBt4=
|
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 h1:rp+c0RAYOWj8l6qbCUTSiRLG/iKnW3K3/QfPPuSsBt4=
|
||||||
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak=
|
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak=
|
||||||
github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
|
github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
|
||||||
|
@ -1,110 +0,0 @@
|
|||||||
{
|
|
||||||
"kind": "books#volume",
|
|
||||||
"id": "ZxwpakTv_MIC",
|
|
||||||
"etag": "mhqr3GsebaQ",
|
|
||||||
"selfLink": "https://www.googleapis.com/books/v1/volumes/ZxwpakTv_MIC",
|
|
||||||
"volumeInfo": {
|
|
||||||
"title": "Alice in Wonderland",
|
|
||||||
"authors": [
|
|
||||||
"Lewis Carroll"
|
|
||||||
],
|
|
||||||
"publisher": "The Floating Press",
|
|
||||||
"publishedDate": "2009-01-01",
|
|
||||||
"description": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing.",
|
|
||||||
"industryIdentifiers": [
|
|
||||||
{
|
|
||||||
"type": "ISBN_10",
|
|
||||||
"identifier": "1877527815"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "ISBN_13",
|
|
||||||
"identifier": "9781877527814"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"readingModes": {
|
|
||||||
"text": true,
|
|
||||||
"image": false
|
|
||||||
},
|
|
||||||
"pageCount": 104,
|
|
||||||
"printedPageCount": 112,
|
|
||||||
"printType": "BOOK",
|
|
||||||
"categories": [
|
|
||||||
"Fiction / Classics",
|
|
||||||
"Juvenile Fiction / General"
|
|
||||||
],
|
|
||||||
"averageRating": 5,
|
|
||||||
"ratingsCount": 1,
|
|
||||||
"maturityRating": "NOT_MATURE",
|
|
||||||
"allowAnonLogging": true,
|
|
||||||
"contentVersion": "0.2.3.0.preview.2",
|
|
||||||
"panelizationSummary": {
|
|
||||||
"containsEpubBubbles": false,
|
|
||||||
"containsImageBubbles": false
|
|
||||||
},
|
|
||||||
"imageLinks": {
|
|
||||||
"smallThumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=5&edge=curl&imgtk=AFLRE71e5b-TeAKTiPSvXNUPeUi8rItzur2xSzwH8QU3qjKH0A2opmoq1o5I9RqJFt1BtcCCqILhnYRcB2aFLJmEvom11gx3Qn3PNN1iBLj2H5y2JHjM8wIwGT7iWFQmEn0Od7s6sOdk&source=gbs_api",
|
|
||||||
"thumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=1&edge=curl&imgtk=AFLRE70QORt9J_DmKJgfyf9UEjQkdDMZ0qAu0GP315a1Q4CRS3snEjKnJJO2fYFdxjMwsSpmHoXDFPZbsy4gw-kMvF7lL8LtwxGbJGlfETHw_jbQBKBlKTrneK4XFvvV-EXNrZRgylxj&source=gbs_api",
|
|
||||||
"small": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=2&edge=curl&imgtk=AFLRE70r1pAUt6VhuEEW8vXFhu8LvKln3yj0mdlaWPO4ZQuODLFQnH0fTebKMMX4ANR5i4PtC0oaI48XkwF-EdzlEM1WmUcR5383N4kRMXcta_i9nmb2y38dnh3hObwQW5VoAxbc9psn&source=gbs_api",
|
|
||||||
"medium": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=3&edge=curl&imgtk=AFLRE7019EVuXvhzbhmtbz1QFh-ajB6kTKRHGhqijFf8big_GPRMMdpCdKlklFbkCfXvy8F64t5NKlThUHb3tFP-51bbDXkrVErFbCqKGzGnDSSm8cewqT8HiYDNHqn0hXYnuYvN4vYf&source=gbs_api",
|
|
||||||
"large": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=4&edge=curl&imgtk=AFLRE72I15XZqp_8c8BAj4EskxkdC6nQz8F0Fs6VJhkykwIqfjzwuM34tUSQa3UnMGbx-UYjZjSLmCNFlePS8aR7yy-0UP9BRnYD-h5Qbesnnt_xdOb3u7Wdiobi6VbciNCBwUwbCyeH&source=gbs_api",
|
|
||||||
"extraLarge": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=6&edge=curl&imgtk=AFLRE70rC6ktY6U0K_hqG1HxPl_9hMjpKb10p9DryVIwQgUjoJfWQOjpNA3EQ-5yk167yYDlO27gylqNAdJBYWu7ZHr3GuqkjTDpXjDvzBBppVyWaVNxKwhOz3gfJ-gzM6cC4kLHP26R&source=gbs_api"
|
|
||||||
},
|
|
||||||
"language": "en",
|
|
||||||
"previewLink": "http://books.google.com/books?id=ZxwpakTv_MIC&hl=&source=gbs_api",
|
|
||||||
"infoLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&source=gbs_api",
|
|
||||||
"canonicalVolumeLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC"
|
|
||||||
},
|
|
||||||
"layerInfo": {
|
|
||||||
"layers": [
|
|
||||||
{
|
|
||||||
"layerId": "geo",
|
|
||||||
"volumeAnnotationsVersion": "2"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"saleInfo": {
|
|
||||||
"country": "US",
|
|
||||||
"saleability": "FOR_SALE",
|
|
||||||
"isEbook": true,
|
|
||||||
"listPrice": {
|
|
||||||
"amount": 3.99,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"retailPrice": {
|
|
||||||
"amount": 3.99,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"buyLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&rdid=book-ZxwpakTv_MIC&rdot=1&source=gbs_api",
|
|
||||||
"offers": [
|
|
||||||
{
|
|
||||||
"finskyOfferType": 1,
|
|
||||||
"listPrice": {
|
|
||||||
"amountInMicros": 3990000,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"retailPrice": {
|
|
||||||
"amountInMicros": 3990000,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"giftable": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"accessInfo": {
|
|
||||||
"country": "US",
|
|
||||||
"viewability": "PARTIAL",
|
|
||||||
"embeddable": true,
|
|
||||||
"publicDomain": false,
|
|
||||||
"textToSpeechPermission": "ALLOWED",
|
|
||||||
"epub": {
|
|
||||||
"isAvailable": true,
|
|
||||||
"acsTokenLink": "http://books.google.com/books/download/Alice_in_Wonderland-sample-epub.acsm?id=ZxwpakTv_MIC&format=epub&output=acs4_fulfillment_token&dl_type=sample&source=gbs_api"
|
|
||||||
},
|
|
||||||
"pdf": {
|
|
||||||
"isAvailable": false
|
|
||||||
},
|
|
||||||
"webReaderLink": "http://play.google.com/books/reader?id=ZxwpakTv_MIC&hl=&source=gbs_api",
|
|
||||||
"accessViewStatus": "SAMPLE",
|
|
||||||
"quoteSharingAllowed": false
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,105 +0,0 @@
|
|||||||
{
|
|
||||||
"kind": "books#volumes",
|
|
||||||
"totalItems": 1,
|
|
||||||
"items": [
|
|
||||||
{
|
|
||||||
"kind": "books#volume",
|
|
||||||
"id": "ZxwpakTv_MIC",
|
|
||||||
"etag": "F2eR9VV6VwQ",
|
|
||||||
"selfLink": "https://www.googleapis.com/books/v1/volumes/ZxwpakTv_MIC",
|
|
||||||
"volumeInfo": {
|
|
||||||
"title": "Alice in Wonderland",
|
|
||||||
"authors": [
|
|
||||||
"Lewis Carroll"
|
|
||||||
],
|
|
||||||
"publisher": "The Floating Press",
|
|
||||||
"publishedDate": "2009-01-01",
|
|
||||||
"description": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing.",
|
|
||||||
"industryIdentifiers": [
|
|
||||||
{
|
|
||||||
"type": "ISBN_13",
|
|
||||||
"identifier": "9781877527814"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "ISBN_10",
|
|
||||||
"identifier": "1877527815"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"readingModes": {
|
|
||||||
"text": true,
|
|
||||||
"image": false
|
|
||||||
},
|
|
||||||
"pageCount": 104,
|
|
||||||
"printType": "BOOK",
|
|
||||||
"categories": [
|
|
||||||
"Fiction"
|
|
||||||
],
|
|
||||||
"averageRating": 5,
|
|
||||||
"ratingsCount": 1,
|
|
||||||
"maturityRating": "NOT_MATURE",
|
|
||||||
"allowAnonLogging": true,
|
|
||||||
"contentVersion": "0.2.3.0.preview.2",
|
|
||||||
"panelizationSummary": {
|
|
||||||
"containsEpubBubbles": false,
|
|
||||||
"containsImageBubbles": false
|
|
||||||
},
|
|
||||||
"imageLinks": {
|
|
||||||
"smallThumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=5&edge=curl&source=gbs_api",
|
|
||||||
"thumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=1&edge=curl&source=gbs_api"
|
|
||||||
},
|
|
||||||
"language": "en",
|
|
||||||
"previewLink": "http://books.google.com/books?id=ZxwpakTv_MIC&printsec=frontcover&dq=isbn:1877527815&hl=&cd=1&source=gbs_api",
|
|
||||||
"infoLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&source=gbs_api",
|
|
||||||
"canonicalVolumeLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC"
|
|
||||||
},
|
|
||||||
"saleInfo": {
|
|
||||||
"country": "US",
|
|
||||||
"saleability": "FOR_SALE",
|
|
||||||
"isEbook": true,
|
|
||||||
"listPrice": {
|
|
||||||
"amount": 3.99,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"retailPrice": {
|
|
||||||
"amount": 3.99,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"buyLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&rdid=book-ZxwpakTv_MIC&rdot=1&source=gbs_api",
|
|
||||||
"offers": [
|
|
||||||
{
|
|
||||||
"finskyOfferType": 1,
|
|
||||||
"listPrice": {
|
|
||||||
"amountInMicros": 3990000,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"retailPrice": {
|
|
||||||
"amountInMicros": 3990000,
|
|
||||||
"currencyCode": "USD"
|
|
||||||
},
|
|
||||||
"giftable": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"accessInfo": {
|
|
||||||
"country": "US",
|
|
||||||
"viewability": "PARTIAL",
|
|
||||||
"embeddable": true,
|
|
||||||
"publicDomain": false,
|
|
||||||
"textToSpeechPermission": "ALLOWED",
|
|
||||||
"epub": {
|
|
||||||
"isAvailable": true,
|
|
||||||
"acsTokenLink": "http://books.google.com/books/download/Alice_in_Wonderland-sample-epub.acsm?id=ZxwpakTv_MIC&format=epub&output=acs4_fulfillment_token&dl_type=sample&source=gbs_api"
|
|
||||||
},
|
|
||||||
"pdf": {
|
|
||||||
"isAvailable": false
|
|
||||||
},
|
|
||||||
"webReaderLink": "http://play.google.com/books/reader?id=ZxwpakTv_MIC&hl=&source=gbs_api",
|
|
||||||
"accessViewStatus": "SAMPLE",
|
|
||||||
"quoteSharingAllowed": false
|
|
||||||
},
|
|
||||||
"searchInfo": {
|
|
||||||
"textSnippet": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures."
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
@ -16,7 +16,6 @@ func getEPUBMetadata(filepath string) (*MetadataInfo, error) {
|
|||||||
rf := rc.Rootfiles[0]
|
rf := rc.Rootfiles[0]
|
||||||
|
|
||||||
parsedMetadata := &MetadataInfo{
|
parsedMetadata := &MetadataInfo{
|
||||||
Type: TYPE_EPUB,
|
|
||||||
Title: &rf.Title,
|
Title: &rf.Title,
|
||||||
Author: &rf.Creator,
|
Author: &rf.Creator,
|
||||||
Description: &rf.Description,
|
Description: &rf.Description,
|
||||||
|
@ -1,130 +0,0 @@
|
|||||||
package metadata
|
|
||||||
|
|
||||||
import (
|
|
||||||
_ "embed"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/jarcoal/httpmock"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
// const GBOOKS_QUERY_URL string = "https://www.googleapis.com/books/v1/volumes?q=%s"
|
|
||||||
// const GBOOKS_GBID_INFO_URL string = "https://www.googleapis.com/books/v1/volumes/%s"
|
|
||||||
// const GBOOKS_GBID_COVER_URL string = "https://books.google.com/books/content/images/frontcover/%s?fife=w480-h690"
|
|
||||||
|
|
||||||
//go:embed _test_files/gbooks_id_response.json
|
|
||||||
var idResp string
|
|
||||||
|
|
||||||
//go:embed _test_files/gbooks_query_response.json
|
|
||||||
var queryResp string
|
|
||||||
|
|
||||||
type details struct {
|
|
||||||
URLs []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Hook API Helper
|
|
||||||
func hookAPI() *details {
|
|
||||||
// Start HTTPMock
|
|
||||||
httpmock.Activate()
|
|
||||||
|
|
||||||
// Create details struct
|
|
||||||
d := &details{
|
|
||||||
URLs: []string{},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create Hook
|
|
||||||
matchRE := regexp.MustCompile(`^https://www\.googleapis\.com/books/v1/volumes.*`)
|
|
||||||
httpmock.RegisterRegexpResponder("GET", matchRE, func(req *http.Request) (*http.Response, error) {
|
|
||||||
// Append URL
|
|
||||||
d.URLs = append(d.URLs, req.URL.String())
|
|
||||||
|
|
||||||
// Get Raw Response
|
|
||||||
var rawResp string
|
|
||||||
if req.URL.Query().Get("q") != "" {
|
|
||||||
rawResp = queryResp
|
|
||||||
} else {
|
|
||||||
rawResp = idResp
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert to JSON Response
|
|
||||||
var responseData map[string]interface{}
|
|
||||||
json.Unmarshal([]byte(rawResp), &responseData)
|
|
||||||
|
|
||||||
// Return Response
|
|
||||||
return httpmock.NewJsonResponse(200, responseData)
|
|
||||||
})
|
|
||||||
|
|
||||||
return d
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGBooksGBIDMetadata(t *testing.T) {
|
|
||||||
hookDetails := hookAPI()
|
|
||||||
defer httpmock.DeactivateAndReset()
|
|
||||||
|
|
||||||
GBID := "ZxwpakTv_MIC"
|
|
||||||
expectedURL := fmt.Sprintf(GBOOKS_GBID_INFO_URL, GBID)
|
|
||||||
metadataResp, err := getGBooksMetadata(MetadataInfo{ID: &GBID})
|
|
||||||
|
|
||||||
assert.Nil(t, err, "should not have error")
|
|
||||||
assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
|
|
||||||
assert.Equal(t, 1, len(metadataResp), "should have single result")
|
|
||||||
|
|
||||||
mResult := metadataResp[0]
|
|
||||||
validateResult(t, &mResult)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGBooksISBNQuery(t *testing.T) {
|
|
||||||
hookDetails := hookAPI()
|
|
||||||
defer httpmock.DeactivateAndReset()
|
|
||||||
|
|
||||||
ISBN10 := "1877527815"
|
|
||||||
expectedURL := fmt.Sprintf(GBOOKS_QUERY_URL, "isbn:"+ISBN10)
|
|
||||||
metadataResp, err := getGBooksMetadata(MetadataInfo{
|
|
||||||
ISBN10: &ISBN10,
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Nil(t, err, "should not have error")
|
|
||||||
assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
|
|
||||||
assert.Equal(t, 1, len(metadataResp), "should have single result")
|
|
||||||
|
|
||||||
mResult := metadataResp[0]
|
|
||||||
validateResult(t, &mResult)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGBooksTitleQuery(t *testing.T) {
|
|
||||||
hookDetails := hookAPI()
|
|
||||||
defer httpmock.DeactivateAndReset()
|
|
||||||
|
|
||||||
title := "Alice in Wonderland 1877527815"
|
|
||||||
expectedURL := fmt.Sprintf(GBOOKS_QUERY_URL, url.QueryEscape(strings.TrimSpace(title)))
|
|
||||||
metadataResp, err := getGBooksMetadata(MetadataInfo{
|
|
||||||
Title: &title,
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Nil(t, err, "should not have error")
|
|
||||||
assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
|
|
||||||
assert.NotEqual(t, 0, len(metadataResp), "should not have no results")
|
|
||||||
|
|
||||||
mResult := metadataResp[0]
|
|
||||||
validateResult(t, &mResult)
|
|
||||||
}
|
|
||||||
|
|
||||||
func validateResult(t *testing.T, m *MetadataInfo) {
|
|
||||||
expectedTitle := "Alice in Wonderland"
|
|
||||||
expectedAuthor := "Lewis Carroll"
|
|
||||||
expectedDesc := "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing."
|
|
||||||
expectedISBN10 := "1877527815"
|
|
||||||
expectedISBN13 := "9781877527814"
|
|
||||||
|
|
||||||
assert.Equal(t, expectedTitle, *m.Title, "should have title")
|
|
||||||
assert.Equal(t, expectedAuthor, *m.Author, "should have author")
|
|
||||||
assert.Equal(t, expectedDesc, *m.Description, "should have description")
|
|
||||||
assert.Equal(t, expectedISBN10, *m.ISBN10, "should have ISBN10")
|
|
||||||
assert.Equal(t, expectedISBN13, *m.ISBN13, "should have ISBN10")
|
|
||||||
}
|
|
76
metadata/integrations_test.go
Normal file
76
metadata/integrations_test.go
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
//go:build integration
|
||||||
|
|
||||||
|
package metadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestGBooksGBIDMetadata(t *testing.T) {
|
||||||
|
GBID := "ZxwpakTv_MIC"
|
||||||
|
metadataResp, err := getGBooksMetadata(MetadataInfo{
|
||||||
|
ID: &GBID,
|
||||||
|
})
|
||||||
|
|
||||||
|
if len(metadataResp) != 1 {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, len(metadataResp), err)
|
||||||
|
}
|
||||||
|
|
||||||
|
mResult := metadataResp[0]
|
||||||
|
validateResult(&mResult, t)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGBooksISBNQuery(t *testing.T) {
|
||||||
|
ISBN10 := "1877527815"
|
||||||
|
metadataResp, err := getGBooksMetadata(MetadataInfo{
|
||||||
|
ISBN10: &ISBN10,
|
||||||
|
})
|
||||||
|
|
||||||
|
if len(metadataResp) != 1 {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, len(metadataResp), err)
|
||||||
|
}
|
||||||
|
|
||||||
|
mResult := metadataResp[0]
|
||||||
|
validateResult(&mResult, t)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGBooksTitleQuery(t *testing.T) {
|
||||||
|
title := "Alice in Wonderland 1877527815"
|
||||||
|
metadataResp, err := getGBooksMetadata(MetadataInfo{
|
||||||
|
Title: &title,
|
||||||
|
})
|
||||||
|
|
||||||
|
if len(metadataResp) == 0 {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, "> 0", len(metadataResp), err)
|
||||||
|
}
|
||||||
|
|
||||||
|
mResult := metadataResp[0]
|
||||||
|
validateResult(&mResult, t)
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateResult(m *MetadataInfo, t *testing.T) {
|
||||||
|
expect := "Lewis Carroll"
|
||||||
|
if *m.Author != expect {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v`, expect, *m.Author)
|
||||||
|
}
|
||||||
|
|
||||||
|
expect = "Alice in Wonderland"
|
||||||
|
if *m.Title != expect {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v`, expect, *m.Title)
|
||||||
|
}
|
||||||
|
|
||||||
|
expect = "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing."
|
||||||
|
if *m.Description != expect {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v`, expect, *m.Description)
|
||||||
|
}
|
||||||
|
|
||||||
|
expect = "1877527815"
|
||||||
|
if *m.ISBN10 != expect {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v`, expect, *m.ISBN10)
|
||||||
|
}
|
||||||
|
|
||||||
|
expect = "9781877527814"
|
||||||
|
if *m.ISBN13 != expect {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v`, expect, *m.ISBN13)
|
||||||
|
}
|
||||||
|
}
|
@ -3,47 +3,27 @@ package metadata
|
|||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
"github.com/gabriel-vasile/mimetype"
|
"github.com/gabriel-vasile/mimetype"
|
||||||
"reichard.io/antholume/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type MetadataHandler func(string) (*MetadataInfo, error)
|
|
||||||
|
|
||||||
type DocumentType string
|
|
||||||
|
|
||||||
const (
|
|
||||||
TYPE_EPUB DocumentType = ".epub"
|
|
||||||
)
|
|
||||||
|
|
||||||
var extensionHandlerMap = map[DocumentType]MetadataHandler{
|
|
||||||
TYPE_EPUB: getEPUBMetadata,
|
|
||||||
}
|
|
||||||
|
|
||||||
type Source int
|
type Source int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
SOURCE_GBOOK Source = iota
|
GBOOK Source = iota
|
||||||
SOURCE_OLIB
|
OLIB
|
||||||
)
|
)
|
||||||
|
|
||||||
type MetadataInfo struct {
|
type MetadataInfo struct {
|
||||||
ID *string
|
ID *string
|
||||||
MD5 *string
|
|
||||||
PartialMD5 *string
|
|
||||||
WordCount *int64
|
|
||||||
|
|
||||||
Title *string
|
Title *string
|
||||||
Author *string
|
Author *string
|
||||||
Description *string
|
Description *string
|
||||||
ISBN10 *string
|
ISBN10 *string
|
||||||
ISBN13 *string
|
ISBN13 *string
|
||||||
Type DocumentType
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Downloads the Google Books cover file and saves it to the provided directory.
|
|
||||||
func CacheCover(gbid string, coverDir string, documentID string, overwrite bool) (*string, error) {
|
func CacheCover(gbid string, coverDir string, documentID string, overwrite bool) (*string, error) {
|
||||||
// Get Filepath
|
// Get Filepath
|
||||||
coverFile := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", documentID))
|
coverFile := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", documentID))
|
||||||
@ -59,12 +39,11 @@ func CacheCover(gbid string, coverDir string, documentID string, overwrite bool)
|
|||||||
return &coverFile, nil
|
return &coverFile, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Searches source for metadata based on the provided information.
|
|
||||||
func SearchMetadata(s Source, metadataSearch MetadataInfo) ([]MetadataInfo, error) {
|
func SearchMetadata(s Source, metadataSearch MetadataInfo) ([]MetadataInfo, error) {
|
||||||
switch s {
|
switch s {
|
||||||
case SOURCE_GBOOK:
|
case GBOOK:
|
||||||
return getGBooksMetadata(metadataSearch)
|
return getGBooksMetadata(metadataSearch)
|
||||||
case SOURCE_OLIB:
|
case OLIB:
|
||||||
return nil, errors.New("Not implemented")
|
return nil, errors.New("Not implemented")
|
||||||
default:
|
default:
|
||||||
return nil, errors.New("Not implemented")
|
return nil, errors.New("Not implemented")
|
||||||
@ -72,112 +51,32 @@ func SearchMetadata(s Source, metadataSearch MetadataInfo) ([]MetadataInfo, erro
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns the word count of the provided filepath. An error will be returned
|
func GetWordCount(filepath string) (int64, error) {
|
||||||
// if the file is not supported.
|
fileMime, err := mimetype.DetectFile(filepath)
|
||||||
func GetWordCount(filepath string) (*int64, error) {
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if fileExtension := fileMime.Extension(); fileExtension == ".epub" {
|
||||||
|
totalWords, err := countEPUBWords(filepath)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return totalWords, nil
|
||||||
|
} else {
|
||||||
|
return 0, errors.New("Invalid Extension")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetMetadata(filepath string) (*MetadataInfo, error) {
|
||||||
fileMime, err := mimetype.DetectFile(filepath)
|
fileMime, err := mimetype.DetectFile(filepath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if fileExtension := fileMime.Extension(); fileExtension == ".epub" {
|
if fileExtension := fileMime.Extension(); fileExtension == ".epub" {
|
||||||
totalWords, err := countEPUBWords(filepath)
|
return getEPUBMetadata(filepath)
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &totalWords, nil
|
|
||||||
} else {
|
} else {
|
||||||
return nil, fmt.Errorf("Invalid extension")
|
return nil, errors.New("Invalid Extension")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns embedded metadata of the provided file. An error will be returned if
|
|
||||||
// the file is not supported.
|
|
||||||
func GetMetadata(filepath string) (*MetadataInfo, error) {
|
|
||||||
// Detect Extension Type
|
|
||||||
fileMime, err := mimetype.DetectFile(filepath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get Extension Type Metadata Handler
|
|
||||||
fileExtension := fileMime.Extension()
|
|
||||||
handler, ok := extensionHandlerMap[DocumentType(fileExtension)]
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("invalid extension %s", fileExtension)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Acquire Metadata
|
|
||||||
metadataInfo, err := handler(filepath)
|
|
||||||
|
|
||||||
// Calculate MD5 & Partial MD5
|
|
||||||
partialMD5, err := utils.CalculatePartialMD5(filepath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("unable to calculate partial MD5")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calculate Actual MD5
|
|
||||||
MD5, err := utils.CalculateMD5(filepath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("unable to calculate MD5")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calculate Word Count
|
|
||||||
wordCount, err := GetWordCount(filepath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("unable to calculate word count")
|
|
||||||
}
|
|
||||||
|
|
||||||
metadataInfo.WordCount = wordCount
|
|
||||||
metadataInfo.PartialMD5 = partialMD5
|
|
||||||
metadataInfo.MD5 = MD5
|
|
||||||
|
|
||||||
return metadataInfo, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns the extension of the provided filepath (e.g. ".epub"). An error
|
|
||||||
// will be returned if the file is not supported.
|
|
||||||
func GetDocumentType(filepath string) (*DocumentType, error) {
|
|
||||||
// Detect Extension Type
|
|
||||||
fileMime, err := mimetype.DetectFile(filepath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Detect
|
|
||||||
fileExtension := fileMime.Extension()
|
|
||||||
docType, ok := ParseDocumentType(fileExtension)
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("filetype not supported")
|
|
||||||
}
|
|
||||||
|
|
||||||
return &docType, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns the extension of the provided file reader (e.g. ".epub"). An error
|
|
||||||
// will be returned if the file is not supported.
|
|
||||||
func GetDocumentTypeReader(r io.Reader) (*DocumentType, error) {
|
|
||||||
// Detect Extension Type
|
|
||||||
fileMime, err := mimetype.DetectReader(r)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Detect
|
|
||||||
fileExtension := fileMime.Extension()
|
|
||||||
docType, ok := ParseDocumentType(fileExtension)
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("filetype not supported")
|
|
||||||
}
|
|
||||||
|
|
||||||
return &docType, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given a filetype string, attempt to resolve a DocumentType
|
|
||||||
func ParseDocumentType(input string) (DocumentType, bool) {
|
|
||||||
validTypes := map[string]DocumentType{
|
|
||||||
string(TYPE_EPUB): TYPE_EPUB,
|
|
||||||
}
|
|
||||||
found, ok := validTypes[input]
|
|
||||||
return found, ok
|
|
||||||
}
|
|
||||||
|
@ -1,46 +1,36 @@
|
|||||||
package metadata
|
package metadata
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestGetWordCount(t *testing.T) {
|
func TestGetWordCount(t *testing.T) {
|
||||||
var desiredCount int64 = 30080
|
var want int64 = 30080
|
||||||
actualCount, err := countEPUBWords("../_test_files/alice.epub")
|
wordCount, err := countEPUBWords("../_test_files/alice.epub")
|
||||||
|
|
||||||
assert.Nil(t, err, "should have no error")
|
|
||||||
assert.Equal(t, desiredCount, actualCount, "should be correct word count")
|
|
||||||
|
|
||||||
|
if wordCount != want {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, wordCount, err)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetMetadata(t *testing.T) {
|
func TestGetMetadata(t *testing.T) {
|
||||||
desiredTitle := "Alice's Adventures in Wonderland / Illustrated by Arthur Rackham. With a Proem by Austin Dobson"
|
metadataInfo, err := getEPUBMetadata("../_test_files/alice.epub")
|
||||||
desiredAuthor := "Lewis Carroll"
|
if err != nil {
|
||||||
desiredDescription := ""
|
t.Fatalf(`Expected: *MetadataInfo, Got: nil, Error: %v`, err)
|
||||||
|
}
|
||||||
|
|
||||||
metadataInfo, err := GetMetadata("../_test_files/alice.epub")
|
want := "Alice's Adventures in Wonderland / Illustrated by Arthur Rackham. With a Proem by Austin Dobson"
|
||||||
|
if *metadataInfo.Title != want {
|
||||||
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, *metadataInfo.Title, err)
|
||||||
|
}
|
||||||
|
|
||||||
assert.Nil(t, err, "should have no error")
|
want = "Lewis Carroll"
|
||||||
assert.Equal(t, desiredTitle, *metadataInfo.Title, "should be correct title")
|
if *metadataInfo.Author != want {
|
||||||
assert.Equal(t, desiredAuthor, *metadataInfo.Author, "should be correct author")
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, *metadataInfo.Author, err)
|
||||||
assert.Equal(t, desiredDescription, *metadataInfo.Description, "should be correct author")
|
}
|
||||||
assert.Equal(t, TYPE_EPUB, metadataInfo.Type, "should be correct type")
|
|
||||||
}
|
want = ""
|
||||||
|
if *metadataInfo.Description != want {
|
||||||
func TestGetExtension(t *testing.T) {
|
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, *metadataInfo.Description, err)
|
||||||
docType, err := GetDocumentType("../_test_files/alice.epub")
|
}
|
||||||
|
|
||||||
assert.Nil(t, err, "should have no error")
|
|
||||||
assert.Equal(t, TYPE_EPUB, *docType)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetExtensionReader(t *testing.T) {
|
|
||||||
file, _ := os.Open("../_test_files/alice.epub")
|
|
||||||
docType, err := GetDocumentTypeReader(file)
|
|
||||||
|
|
||||||
assert.Nil(t, err, "should have no error")
|
|
||||||
assert.Equal(t, TYPE_EPUB, *docType)
|
|
||||||
}
|
}
|
||||||
|
@ -8,8 +8,8 @@ import (
|
|||||||
|
|
||||||
// Feed root element for acquisition or navigation feed
|
// Feed root element for acquisition or navigation feed
|
||||||
type Feed struct {
|
type Feed struct {
|
||||||
ID string `xml:"id,omitempty"`
|
|
||||||
XMLName xml.Name `xml:"feed"`
|
XMLName xml.Name `xml:"feed"`
|
||||||
|
ID string `xml:"id,omitempty",`
|
||||||
Title string `xml:"title,omitempty"`
|
Title string `xml:"title,omitempty"`
|
||||||
Updated time.Time `xml:"updated,omitempty"`
|
Updated time.Time `xml:"updated,omitempty"`
|
||||||
Entries []Entry `xml:"entry,omitempty"`
|
Entries []Entry `xml:"entry,omitempty"`
|
||||||
|
@ -1,75 +0,0 @@
|
|||||||
package search
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/PuerkitoBio/goquery"
|
|
||||||
)
|
|
||||||
|
|
||||||
func parseAnnasArchiveDownloadURL(body io.ReadCloser) (string, error) {
|
|
||||||
// Parse
|
|
||||||
defer body.Close()
|
|
||||||
doc, _ := goquery.NewDocumentFromReader(body)
|
|
||||||
|
|
||||||
// Return Download URL
|
|
||||||
downloadURL, exists := doc.Find("body > table > tbody > tr > td > a").Attr("href")
|
|
||||||
if exists == false {
|
|
||||||
return "", fmt.Errorf("Download URL not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Possible Funky URL
|
|
||||||
downloadURL = strings.ReplaceAll(downloadURL, "\\", "/")
|
|
||||||
|
|
||||||
return downloadURL, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseAnnasArchive(body io.ReadCloser) ([]SearchItem, error) {
|
|
||||||
// Parse
|
|
||||||
defer body.Close()
|
|
||||||
doc, err := goquery.NewDocumentFromReader(body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Normalize Results
|
|
||||||
var allEntries []SearchItem
|
|
||||||
doc.Find("form > div.w-full > div.w-full > div > div.justify-center").Each(func(ix int, rawBook *goquery.Selection) {
|
|
||||||
// Parse Details
|
|
||||||
details := rawBook.Find("div:nth-child(2) > div:nth-child(1)").Text()
|
|
||||||
detailsSplit := strings.Split(details, ", ")
|
|
||||||
|
|
||||||
// Invalid Details
|
|
||||||
if len(detailsSplit) < 3 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
language := detailsSplit[0]
|
|
||||||
fileType := detailsSplit[1]
|
|
||||||
fileSize := detailsSplit[2]
|
|
||||||
|
|
||||||
// Get Title & Author
|
|
||||||
title := rawBook.Find("h3").Text()
|
|
||||||
author := rawBook.Find("div:nth-child(2) > div:nth-child(4)").Text()
|
|
||||||
|
|
||||||
// Parse MD5
|
|
||||||
itemHref, _ := rawBook.Find("a").Attr("href")
|
|
||||||
hrefArray := strings.Split(itemHref, "/")
|
|
||||||
id := hrefArray[len(hrefArray)-1]
|
|
||||||
|
|
||||||
item := SearchItem{
|
|
||||||
ID: id,
|
|
||||||
Title: title,
|
|
||||||
Author: author,
|
|
||||||
Language: language,
|
|
||||||
FileType: fileType,
|
|
||||||
FileSize: fileSize,
|
|
||||||
}
|
|
||||||
|
|
||||||
allEntries = append(allEntries, item)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Return Results
|
|
||||||
return allEntries, nil
|
|
||||||
}
|
|
@ -1,42 +0,0 @@
|
|||||||
package search
|
|
||||||
|
|
||||||
import (
|
|
||||||
"io"
|
|
||||||
|
|
||||||
"github.com/PuerkitoBio/goquery"
|
|
||||||
)
|
|
||||||
|
|
||||||
func GoodReadsMostRead(c Cadence) ([]SearchItem, error) {
|
|
||||||
body, err := getPage("https://www.goodreads.com/book/most_read?category=all&country=US&duration=" + string(c))
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return parseGoodReads(body)
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseGoodReads(body io.ReadCloser) ([]SearchItem, error) {
|
|
||||||
// Parse
|
|
||||||
defer body.Close()
|
|
||||||
doc, err := goquery.NewDocumentFromReader(body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Normalize Results
|
|
||||||
var allEntries []SearchItem
|
|
||||||
|
|
||||||
doc.Find("[itemtype=\"http://schema.org/Book\"]").Each(func(ix int, rawBook *goquery.Selection) {
|
|
||||||
title := rawBook.Find(".bookTitle span").Text()
|
|
||||||
author := rawBook.Find(".authorName span").Text()
|
|
||||||
|
|
||||||
item := SearchItem{
|
|
||||||
Title: title,
|
|
||||||
Author: author,
|
|
||||||
}
|
|
||||||
|
|
||||||
allEntries = append(allEntries, item)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Return Results
|
|
||||||
return allEntries, nil
|
|
||||||
}
|
|
123
search/libgen.go
123
search/libgen.go
@ -1,123 +0,0 @@
|
|||||||
package search
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/PuerkitoBio/goquery"
|
|
||||||
)
|
|
||||||
|
|
||||||
func parseLibGenFiction(body io.ReadCloser) ([]SearchItem, error) {
|
|
||||||
// Parse
|
|
||||||
defer body.Close()
|
|
||||||
doc, err := goquery.NewDocumentFromReader(body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Normalize Results
|
|
||||||
var allEntries []SearchItem
|
|
||||||
doc.Find("table.catalog tbody > tr").Each(func(ix int, rawBook *goquery.Selection) {
|
|
||||||
|
|
||||||
// Parse File Details
|
|
||||||
fileItem := rawBook.Find("td:nth-child(5)")
|
|
||||||
fileDesc := fileItem.Text()
|
|
||||||
fileDescSplit := strings.Split(fileDesc, "/")
|
|
||||||
fileType := strings.ToLower(strings.TrimSpace(fileDescSplit[0]))
|
|
||||||
fileSize := strings.TrimSpace(fileDescSplit[1])
|
|
||||||
|
|
||||||
// Parse Upload Date
|
|
||||||
uploadedRaw, _ := fileItem.Attr("title")
|
|
||||||
uploadedDateRaw := strings.Split(uploadedRaw, "Uploaded at ")[1]
|
|
||||||
uploadDate, _ := time.Parse("2006-01-02 15:04:05", uploadedDateRaw)
|
|
||||||
|
|
||||||
// Parse MD5
|
|
||||||
editHref, _ := rawBook.Find("td:nth-child(7) a").Attr("href")
|
|
||||||
hrefArray := strings.Split(editHref, "/")
|
|
||||||
id := hrefArray[len(hrefArray)-1]
|
|
||||||
|
|
||||||
// Parse Other Details
|
|
||||||
title := rawBook.Find("td:nth-child(3) p a").Text()
|
|
||||||
author := rawBook.Find(".catalog_authors li a").Text()
|
|
||||||
language := rawBook.Find("td:nth-child(4)").Text()
|
|
||||||
series := rawBook.Find("td:nth-child(2)").Text()
|
|
||||||
|
|
||||||
item := SearchItem{
|
|
||||||
ID: id,
|
|
||||||
Title: title,
|
|
||||||
Author: author,
|
|
||||||
Series: series,
|
|
||||||
Language: language,
|
|
||||||
FileType: fileType,
|
|
||||||
FileSize: fileSize,
|
|
||||||
UploadDate: uploadDate.Format(time.RFC3339),
|
|
||||||
}
|
|
||||||
|
|
||||||
allEntries = append(allEntries, item)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Return Results
|
|
||||||
return allEntries, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseLibGenNonFiction(body io.ReadCloser) ([]SearchItem, error) {
|
|
||||||
// Parse
|
|
||||||
defer body.Close()
|
|
||||||
doc, err := goquery.NewDocumentFromReader(body)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Normalize Results
|
|
||||||
var allEntries []SearchItem
|
|
||||||
doc.Find("table.c tbody > tr:nth-child(n + 2)").Each(func(ix int, rawBook *goquery.Selection) {
|
|
||||||
|
|
||||||
// Parse Type & Size
|
|
||||||
fileSize := strings.ToLower(strings.TrimSpace(rawBook.Find("td:nth-child(8)").Text()))
|
|
||||||
fileType := strings.ToLower(strings.TrimSpace(rawBook.Find("td:nth-child(9)").Text()))
|
|
||||||
|
|
||||||
// Parse MD5
|
|
||||||
titleRaw := rawBook.Find("td:nth-child(3) [id]")
|
|
||||||
editHref, _ := titleRaw.Attr("href")
|
|
||||||
hrefArray := strings.Split(editHref, "?md5=")
|
|
||||||
id := hrefArray[1]
|
|
||||||
|
|
||||||
// Parse Other Details
|
|
||||||
title := titleRaw.Text()
|
|
||||||
author := rawBook.Find("td:nth-child(2)").Text()
|
|
||||||
language := rawBook.Find("td:nth-child(7)").Text()
|
|
||||||
series := rawBook.Find("td:nth-child(3) [href*='column=series']").Text()
|
|
||||||
|
|
||||||
item := SearchItem{
|
|
||||||
ID: id,
|
|
||||||
Title: title,
|
|
||||||
Author: author,
|
|
||||||
Series: series,
|
|
||||||
Language: language,
|
|
||||||
FileType: fileType,
|
|
||||||
FileSize: fileSize,
|
|
||||||
}
|
|
||||||
|
|
||||||
allEntries = append(allEntries, item)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Return Results
|
|
||||||
return allEntries, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseLibGenDownloadURL(body io.ReadCloser) (string, error) {
|
|
||||||
// Parse
|
|
||||||
defer body.Close()
|
|
||||||
doc, _ := goquery.NewDocumentFromReader(body)
|
|
||||||
|
|
||||||
// Return Download URL
|
|
||||||
// downloadURL, _ := doc.Find("#download [href*=cloudflare]").Attr("href")
|
|
||||||
downloadURL, exists := doc.Find("#download h2 a").Attr("href")
|
|
||||||
if exists == false {
|
|
||||||
return "", fmt.Errorf("Download URL not found")
|
|
||||||
}
|
|
||||||
|
|
||||||
return downloadURL, nil
|
|
||||||
}
|
|
225
search/search.go
225
search/search.go
@ -2,13 +2,16 @@ package search
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"crypto/tls"
|
"crypto/tls"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
"os"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/PuerkitoBio/goquery"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -99,14 +102,14 @@ func SaveBook(id string, source Source) (string, error) {
|
|||||||
bookURL, err := def.parseDownloadFunc(body)
|
bookURL, err := def.parseDownloadFunc(body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("Parse Download URL Error: ", err)
|
log.Error("Parse Download URL Error: ", err)
|
||||||
return "", fmt.Errorf("Download Failure")
|
return "", errors.New("Download Failure")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create File
|
// Create File
|
||||||
tempFile, err := os.CreateTemp("", "book")
|
tempFile, err := os.CreateTemp("", "book")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("File Create Error: ", err)
|
log.Error("File Create Error: ", err)
|
||||||
return "", fmt.Errorf("File Failure")
|
return "", errors.New("File Failure")
|
||||||
}
|
}
|
||||||
defer tempFile.Close()
|
defer tempFile.Close()
|
||||||
|
|
||||||
@ -116,7 +119,7 @@ func SaveBook(id string, source Source) (string, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
os.Remove(tempFile.Name())
|
os.Remove(tempFile.Name())
|
||||||
log.Error("Book URL API Failure: ", err)
|
log.Error("Book URL API Failure: ", err)
|
||||||
return "", fmt.Errorf("API Failure")
|
return "", errors.New("API Failure")
|
||||||
}
|
}
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
@ -126,12 +129,20 @@ func SaveBook(id string, source Source) (string, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
os.Remove(tempFile.Name())
|
os.Remove(tempFile.Name())
|
||||||
log.Error("File Copy Error: ", err)
|
log.Error("File Copy Error: ", err)
|
||||||
return "", fmt.Errorf("File Failure")
|
return "", errors.New("File Failure")
|
||||||
}
|
}
|
||||||
|
|
||||||
return tempFile.Name(), nil
|
return tempFile.Name(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func GoodReadsMostRead(c Cadence) ([]SearchItem, error) {
|
||||||
|
body, err := getPage("https://www.goodreads.com/book/most_read?category=all&country=US&duration=" + string(c))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return parseGoodReads(body)
|
||||||
|
}
|
||||||
|
|
||||||
func GetBookURL(id string, bookType BookType) (string, error) {
|
func GetBookURL(id string, bookType BookType) (string, error) {
|
||||||
// Derive Info URL
|
// Derive Info URL
|
||||||
var infoURL string
|
var infoURL string
|
||||||
@ -169,6 +180,212 @@ func getPage(page string) (io.ReadCloser, error) {
|
|||||||
return resp.Body, err
|
return resp.Body, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func parseLibGenFiction(body io.ReadCloser) ([]SearchItem, error) {
|
||||||
|
// Parse
|
||||||
|
defer body.Close()
|
||||||
|
doc, err := goquery.NewDocumentFromReader(body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize Results
|
||||||
|
var allEntries []SearchItem
|
||||||
|
doc.Find("table.catalog tbody > tr").Each(func(ix int, rawBook *goquery.Selection) {
|
||||||
|
|
||||||
|
// Parse File Details
|
||||||
|
fileItem := rawBook.Find("td:nth-child(5)")
|
||||||
|
fileDesc := fileItem.Text()
|
||||||
|
fileDescSplit := strings.Split(fileDesc, "/")
|
||||||
|
fileType := strings.ToLower(strings.TrimSpace(fileDescSplit[0]))
|
||||||
|
fileSize := strings.TrimSpace(fileDescSplit[1])
|
||||||
|
|
||||||
|
// Parse Upload Date
|
||||||
|
uploadedRaw, _ := fileItem.Attr("title")
|
||||||
|
uploadedDateRaw := strings.Split(uploadedRaw, "Uploaded at ")[1]
|
||||||
|
uploadDate, _ := time.Parse("2006-01-02 15:04:05", uploadedDateRaw)
|
||||||
|
|
||||||
|
// Parse MD5
|
||||||
|
editHref, _ := rawBook.Find("td:nth-child(7) a").Attr("href")
|
||||||
|
hrefArray := strings.Split(editHref, "/")
|
||||||
|
id := hrefArray[len(hrefArray)-1]
|
||||||
|
|
||||||
|
// Parse Other Details
|
||||||
|
title := rawBook.Find("td:nth-child(3) p a").Text()
|
||||||
|
author := rawBook.Find(".catalog_authors li a").Text()
|
||||||
|
language := rawBook.Find("td:nth-child(4)").Text()
|
||||||
|
series := rawBook.Find("td:nth-child(2)").Text()
|
||||||
|
|
||||||
|
item := SearchItem{
|
||||||
|
ID: id,
|
||||||
|
Title: title,
|
||||||
|
Author: author,
|
||||||
|
Series: series,
|
||||||
|
Language: language,
|
||||||
|
FileType: fileType,
|
||||||
|
FileSize: fileSize,
|
||||||
|
UploadDate: uploadDate.Format(time.RFC3339),
|
||||||
|
}
|
||||||
|
|
||||||
|
allEntries = append(allEntries, item)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Return Results
|
||||||
|
return allEntries, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseLibGenNonFiction(body io.ReadCloser) ([]SearchItem, error) {
|
||||||
|
// Parse
|
||||||
|
defer body.Close()
|
||||||
|
doc, err := goquery.NewDocumentFromReader(body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize Results
|
||||||
|
var allEntries []SearchItem
|
||||||
|
doc.Find("table.c tbody > tr:nth-child(n + 2)").Each(func(ix int, rawBook *goquery.Selection) {
|
||||||
|
|
||||||
|
// Parse Type & Size
|
||||||
|
fileSize := strings.ToLower(strings.TrimSpace(rawBook.Find("td:nth-child(8)").Text()))
|
||||||
|
fileType := strings.ToLower(strings.TrimSpace(rawBook.Find("td:nth-child(9)").Text()))
|
||||||
|
|
||||||
|
// Parse MD5
|
||||||
|
titleRaw := rawBook.Find("td:nth-child(3) [id]")
|
||||||
|
editHref, _ := titleRaw.Attr("href")
|
||||||
|
hrefArray := strings.Split(editHref, "?md5=")
|
||||||
|
id := hrefArray[1]
|
||||||
|
|
||||||
|
// Parse Other Details
|
||||||
|
title := titleRaw.Text()
|
||||||
|
author := rawBook.Find("td:nth-child(2)").Text()
|
||||||
|
language := rawBook.Find("td:nth-child(7)").Text()
|
||||||
|
series := rawBook.Find("td:nth-child(3) [href*='column=series']").Text()
|
||||||
|
|
||||||
|
item := SearchItem{
|
||||||
|
ID: id,
|
||||||
|
Title: title,
|
||||||
|
Author: author,
|
||||||
|
Series: series,
|
||||||
|
Language: language,
|
||||||
|
FileType: fileType,
|
||||||
|
FileSize: fileSize,
|
||||||
|
}
|
||||||
|
|
||||||
|
allEntries = append(allEntries, item)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Return Results
|
||||||
|
return allEntries, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseLibGenDownloadURL(body io.ReadCloser) (string, error) {
|
||||||
|
// Parse
|
||||||
|
defer body.Close()
|
||||||
|
doc, _ := goquery.NewDocumentFromReader(body)
|
||||||
|
|
||||||
|
// Return Download URL
|
||||||
|
// downloadURL, _ := doc.Find("#download [href*=cloudflare]").Attr("href")
|
||||||
|
downloadURL, exists := doc.Find("#download h2 a").Attr("href")
|
||||||
|
if exists == false {
|
||||||
|
return "", errors.New("Download URL not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return downloadURL, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseGoodReads(body io.ReadCloser) ([]SearchItem, error) {
|
||||||
|
// Parse
|
||||||
|
defer body.Close()
|
||||||
|
doc, err := goquery.NewDocumentFromReader(body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize Results
|
||||||
|
var allEntries []SearchItem
|
||||||
|
|
||||||
|
doc.Find("[itemtype=\"http://schema.org/Book\"]").Each(func(ix int, rawBook *goquery.Selection) {
|
||||||
|
title := rawBook.Find(".bookTitle span").Text()
|
||||||
|
author := rawBook.Find(".authorName span").Text()
|
||||||
|
|
||||||
|
item := SearchItem{
|
||||||
|
Title: title,
|
||||||
|
Author: author,
|
||||||
|
}
|
||||||
|
|
||||||
|
allEntries = append(allEntries, item)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Return Results
|
||||||
|
return allEntries, nil
|
||||||
|
}
|
func parseAnnasArchiveDownloadURL(body io.ReadCloser) (string, error) {
	// Parse
	defer body.Close()
	doc, _ := goquery.NewDocumentFromReader(body)

	// Return Download URL
	downloadURL, exists := doc.Find("body > table > tbody > tr > td > a").Attr("href")
	if !exists {
		return "", errors.New("Download URL not found")
	}

	// Possible Funky URL
	downloadURL = strings.ReplaceAll(downloadURL, "\\", "/")

	return downloadURL, nil
}
func parseAnnasArchive(body io.ReadCloser) ([]SearchItem, error) {
	// Parse
	defer body.Close()
	doc, err := goquery.NewDocumentFromReader(body)
	if err != nil {
		return nil, err
	}

	// Normalize Results
	var allEntries []SearchItem
	doc.Find("form > div.w-full > div.w-full > div > div.justify-center").Each(func(ix int, rawBook *goquery.Selection) {
		// Parse Details
		details := rawBook.Find("div:nth-child(2) > div:nth-child(1)").Text()
		detailsSplit := strings.Split(details, ", ")

		// Invalid Details
		if len(detailsSplit) < 3 {
			return
		}

		language := detailsSplit[0]
		fileType := detailsSplit[1]
		fileSize := detailsSplit[2]

		// Get Title & Author
		title := rawBook.Find("h3").Text()
		author := rawBook.Find("div:nth-child(2) > div:nth-child(4)").Text()

		// Parse MD5
		itemHref, _ := rawBook.Find("a").Attr("href")
		hrefArray := strings.Split(itemHref, "/")
		id := hrefArray[len(hrefArray)-1]

		item := SearchItem{
			ID:       id,
			Title:    title,
			Author:   author,
			Language: language,
			FileType: fileType,
			FileSize: fileSize,
		}

		allEntries = append(allEntries, item)
	})

	// Return Results
	return allEntries, nil
}
func downloadBook(bookURL string) (*http.Response, error) {
	// Allow Insecure
	client := &http.Client{Transport: &http.Transport{
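The context above is cut off by the diff view mid-struct. Assuming the "Allow Insecure" comment means TLS certificate verification is skipped, a client of that shape typically looks like the sketch below; this is an illustration of the pattern, not the elided code from the repository (it would also need the crypto/tls and net/http imports):

// Hypothetical sketch only; the name and body are assumptions, not this repo's code.
func downloadBookInsecure(bookURL string) (*http.Response, error) {
	client := &http.Client{Transport: &http.Transport{
		// Assumption: "Allow Insecure" refers to skipping certificate verification.
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}}
	return client.Get(bookURL)
}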
@ -23,9 +23,7 @@
 </button>
 </form>
 </div>
-<!-- Required for iOS "Hover" Events (onclick) -->
-<div onclick
-  class="flex flex-col-reverse text-black dark:text-white w-full overflow-scroll"
+<div class="flex flex-col-reverse text-black dark:text-white"
   style="font-family: monospace">
 {{ range $log := .Data }}
 <span class="whitespace-nowrap hover:whitespace-pre">{{ $log }}</span>
@ -10,10 +10,10 @@ import (
 )
 
 // Reimplemented KOReader Partial MD5 Calculation
-func CalculatePartialMD5(filePath string) (*string, error) {
+func CalculatePartialMD5(filePath string) (string, error) {
 	file, err := os.Open(filePath)
 	if err != nil {
-		return nil, err
+		return "", err
 	}
 
 	defer file.Close()
@ -41,8 +41,7 @@ func CalculatePartialMD5(filePath string) (*string, error) {
 	}
 
 	allBytes := buf.Bytes()
-	fileHash := fmt.Sprintf("%x", md5.Sum(allBytes))
-	return &fileHash, nil
+	return fmt.Sprintf("%x", md5.Sum(allBytes)), nil
 }
 
 // Creates a token of n size
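The net effect of the two hunks above is that CalculatePartialMD5 now returns the hash by value instead of behind a pointer. A hypothetical call site in the same package, shown only to illustrate the difference (the helper name is made up):

// Before: (*string, error) forced callers to nil-check and dereference:
//   hashPtr, _ := CalculatePartialMD5(path)
//   fmt.Println(*hashPtr)
// After: the hash is used directly.
func examplePartialMD5Usage(path string) {
	hash, err := CalculatePartialMD5(path)
	if err != nil {
		fmt.Println("partial md5 failed:", err)
		return
	}
	fmt.Println(hash)
}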
@ -54,23 +53,3 @@ func GenerateToken(n int) ([]byte, error) {
 	}
 	return b, nil
 }
-
-// Calculate MD5 of a file
-func CalculateMD5(filePath string) (*string, error) {
-	file, err := os.Open(filePath)
-	if err != nil {
-		return nil, err
-	}
-
-	defer file.Close()
-
-	hash := md5.New()
-	_, err = io.Copy(hash, file)
-	if err != nil {
-		return nil, err
-	}
-
-	fileHash := fmt.Sprintf("%x", hash.Sum(nil))
-
-	return &fileHash, nil
-}
@ -1,26 +1,12 @@
 package utils
 
-import (
-	"github.com/stretchr/testify/assert"
-	"testing"
-)
+import "testing"
 
 func TestCalculatePartialMD5(t *testing.T) {
-	assert := assert.New(t)
+	partialMD5, err := CalculatePartialMD5("../_test_files/alice.epub")
 
-	desiredPartialMD5 := "386d1cb51fe4a72e5c9fdad5e059bad9"
-	calculatedPartialMD5, err := CalculatePartialMD5("../_test_files/alice.epub")
-
-	assert.Nil(err, "error should be nil")
-	assert.Equal(desiredPartialMD5, *calculatedPartialMD5, "should be equal")
-}
-
-func TestCalculateMD5(t *testing.T) {
-	assert := assert.New(t)
-
-	desiredMD5 := "0f36c66155de34b281c4791654d0b1ce"
-	calculatedMD5, err := CalculateMD5("../_test_files/alice.epub")
-
-	assert.Nil(err, "error should be nil")
-	assert.Equal(desiredMD5, *calculatedMD5, "should be equal")
+	want := "386d1cb51fe4a72e5c9fdad5e059bad9"
+	if partialMD5 != want {
+		t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, partialMD5, err)
+	}
 }