Compare commits
2 Commits
37b6ac10ac...1b8b5060f1
| Author | SHA1 | Date |
|---|---|---|
| | 1b8b5060f1 | |
| | b9b9ad2098 | |
```diff
@@ -209,7 +209,12 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
 		}

 		// Search
-		searchResults := search.SearchBook(*sParams.Query, bType)
+		searchResults, err := search.SearchBook(*sParams.Query, bType)
+		if err != nil {
+			errorPage(c, http.StatusInternalServerError, fmt.Sprintf("Search Error: %v", err))
+			return
+		}
+
 		templateVars["Data"] = searchResults
 		templateVars["BookType"] = *sParams.BookType
 	}
```
```diff
@@ -45,9 +45,15 @@ class EBookReader {
	 * Load progress and generate locations
	 **/
	async setupReader() {
+		// Get Word Count (If Needed)
+		if (this.bookState.words == 0)
+			this.bookState.words = await this.countWords();
+
		// Load Progress
		let { cfi } = await this.getCFIFromXPath(this.bookState.progress);
-		if (!cfi) this.bookState.currentWord = 0;
+		this.bookState.currentWord = cfi
+			? this.bookState.percentage * (this.bookState.words / 100)
+			: 0;

		let getStats = function () {
			// Start Timer
```
```diff
@@ -1038,6 +1044,21 @@ class EBookReader {
		);
	}

+	/**
+	 * Count the words of the book. Useful for keeping a more accurate track
+	 * of progress percentage. Implementation returns the same number as the
+	 * server side implementation.
+	 **/
+	countWords() {
+		// Iterate over each item in the spine, render, and count words.
+		return this.book.spine.spineItems.reduce(async (totalCount, item) => {
+			let currentCount = await totalCount;
+			let newDoc = await item.load(this.book.load.bind(this.book));
+			let itemCount = newDoc.innerText.trim().split(/\s+/).length;
+			return currentCount + itemCount;
+		}, 0);
+	}
+
	/**
	 * Save settings to localStorage
	 **/
```
```diff
@@ -1,11 +1,10 @@
 package metadata

 import (
-	"io"
 	"strings"

+	"github.com/PuerkitoBio/goquery"
 	"github.com/taylorskalyo/goreader/epub"
-	"golang.org/x/net/html"
 )

 func getEPUBMetadata(filepath string) (*MetadataInfo, error) {
```
```diff
@@ -32,33 +31,9 @@ func countEPUBWords(filepath string) (int64, error) {
 	var completeCount int64
 	for _, item := range rf.Spine.Itemrefs {
 		f, _ := item.Open()
-		tokenizer := html.NewTokenizer(f)
-		newCount, err := countTokenizerWords(*tokenizer)
-		if err != nil {
-			return 0, err
-		}
-		completeCount = completeCount + newCount
+		doc, _ := goquery.NewDocumentFromReader(f)
+		completeCount = completeCount + int64(len(strings.Fields(doc.Text())))
 	}

 	return completeCount, nil
 }
-
-func countTokenizerWords(tokenizer html.Tokenizer) (int64, error) {
-	var err error
-	var totalWords int64
-	for {
-		tokenType := tokenizer.Next()
-		token := tokenizer.Token()
-		if tokenType == html.TextToken {
-			currStr := string(token.Data)
-			totalWords = totalWords + int64(len(strings.Fields(currStr)))
-		} else if tokenType == html.ErrorToken {
-			err = tokenizer.Err()
-		}
-		if err == io.EOF {
-			return totalWords, nil
-		} else if err != nil {
-			return 0, err
-		}
-	}
-}
```
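For context, the goquery-based count above boils down to stripping tags and splitting on whitespace. A minimal standalone sketch of that idea follows; the chapter markup and the `main` wrapper are made up for illustration and are not code from this repository:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Hypothetical chapter markup; countEPUBWords applies the same idea to
	// each spine item's reader in turn and sums the results.
	chapter := `<html><body><p>Alice was beginning to get very tired.</p></body></html>`

	// goquery's Text() drops the tags; strings.Fields splits on runs of
	// whitespace, mirroring the client-side split(/\s+/).
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(chapter))
	if err != nil {
		panic(err)
	}
	fmt.Println(int64(len(strings.Fields(doc.Text())))) // 7
}
```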
```diff
@@ -5,7 +5,7 @@ import (
 )

 func TestGetWordCount(t *testing.T) {
-	var want int64 = 30477
+	var want int64 = 30080
 	wordCount, err := countEPUBWords("../_test_files/alice.epub")

 	if wordCount != want {
```
101 search/search.go
```diff
@@ -38,29 +38,37 @@ type SearchItem struct {
 	UploadDate string
 }

-func SearchBook(query string, bookType BookType) (allEntries []SearchItem) {
-	log.Info(query)
+func SearchBook(query string, bookType BookType) ([]SearchItem, error) {
 	if bookType == BOOK_FICTION {
 		// Search Fiction
 		url := "https://libgen.is/fiction/?q=" + url.QueryEscape(query) + "&language=English&format=epub"
-		body := getPage(url)
-		allEntries = parseLibGenFiction(body)
+		body, err := getPage(url)
+		if err != nil {
+			return nil, err
+		}
+		return parseLibGenFiction(body)
 	} else if bookType == BOOK_NON_FICTION {
 		// Search NonFiction
 		url := "https://libgen.is/search.php?req=" + url.QueryEscape(query)
-		body := getPage(url)
-		allEntries = parseLibGenNonFiction(body)
+		body, err := getPage(url)
+		if err != nil {
+			return nil, err
+		}
+		return parseLibGenNonFiction(body)
+	} else {
+		return nil, errors.New("Invalid Book Type")
 	}
-
-	return
 }

-func GoodReadsMostRead(c Cadence) []SearchItem {
-	body := getPage("https://www.goodreads.com/book/most_read?category=all&country=US&duration=" + string(c))
+func GoodReadsMostRead(c Cadence) ([]SearchItem, error) {
+	body, err := getPage("https://www.goodreads.com/book/most_read?category=all&country=US&duration=" + string(c))
+	if err != nil {
+		return nil, err
+	}
 	return parseGoodReads(body)
 }

-func GetBookURL(id string, bookType BookType) string {
+func GetBookURL(id string, bookType BookType) (string, error) {
 	// Derive Info URL
 	var infoURL string
 	if bookType == BOOK_FICTION {
```
```diff
@@ -70,7 +78,10 @@ func GetBookURL(id string, bookType BookType) string {
 	}

 	// Parse & Derive Download URL
-	body := getPage(infoURL)
+	body, err := getPage(infoURL)
+	if err != nil {
+		return "", err
+	}

 	// downloadURL := parseLibGenDownloadURL(body)
 	return parseLibGenDownloadURL(body)
```
```diff
@@ -86,8 +97,15 @@ func SaveBook(id string, bookType BookType) (string, error) {
 	}

 	// Parse & Derive Download URL
-	body := getPage(infoURL)
-	bookURL := parseLibGenDownloadURL(body)
+	body, err := getPage(infoURL)
+	if err != nil {
+		return "", err
+	}
+	bookURL, err := parseLibGenDownloadURL(body)
+	if err != nil {
+		log.Error("[SaveBook] Parse Download URL Error: ", err)
+		return "", errors.New("Download Failure")
+	}

 	// Create File
 	tempFile, err := os.CreateTemp("", "book")
```
```diff
@@ -119,15 +137,29 @@ func SaveBook(id string, bookType BookType) (string, error) {
 	return tempFile.Name(), nil
 }

-func getPage(page string) io.ReadCloser {
-	resp, _ := http.Get(page)
-	return resp.Body
+func getPage(page string) (io.ReadCloser, error) {
+	// Set 10s Timeout
+	client := http.Client{
+		Timeout: 10 * time.Second,
+	}
+
+	// Get Page
+	resp, err := client.Get(page)
+	if err != nil {
+		return nil, err
+	}
+
+	// Return Body
+	return resp.Body, err
 }

-func parseLibGenFiction(body io.ReadCloser) []SearchItem {
+func parseLibGenFiction(body io.ReadCloser) ([]SearchItem, error) {
 	// Parse
 	defer body.Close()
-	doc, _ := goquery.NewDocumentFromReader(body)
+	doc, err := goquery.NewDocumentFromReader(body)
+	if err != nil {
+		return nil, err
+	}

 	// Normalize Results
 	var allEntries []SearchItem
```
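A side note on the new getPage: an `http.Client` with `Timeout` set bounds the entire request, including reading the response body, whereas a bare `http.Get` has no deadline at all. A minimal sketch of the same pattern in isolation; the `fetchPage` name and example URL are placeholders, not code from this repository:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// fetchPage mirrors the pattern used by getPage: a client-level 10s deadline
// that fails fast instead of blocking forever on a slow mirror.
func fetchPage(page string) (io.ReadCloser, error) {
	client := http.Client{
		Timeout: 10 * time.Second,
	}
	resp, err := client.Get(page)
	if err != nil {
		return nil, err
	}
	return resp.Body, nil
}

func main() {
	body, err := fetchPage("https://example.com")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer body.Close()

	// Drain the body just to show the call succeeded.
	n, _ := io.Copy(io.Discard, body)
	fmt.Println("read", n, "bytes")
}
```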
```diff
@@ -171,13 +203,16 @@ func parseLibGenFiction(body io.ReadCloser) []SearchItem {
 	})

 	// Return Results
-	return allEntries
+	return allEntries, nil
 }

-func parseLibGenNonFiction(body io.ReadCloser) []SearchItem {
+func parseLibGenNonFiction(body io.ReadCloser) ([]SearchItem, error) {
 	// Parse
 	defer body.Close()
-	doc, _ := goquery.NewDocumentFromReader(body)
+	doc, err := goquery.NewDocumentFromReader(body)
+	if err != nil {
+		return nil, err
+	}

 	// Normalize Results
 	var allEntries []SearchItem
```
```diff
@@ -213,25 +248,31 @@ func parseLibGenNonFiction(body io.ReadCloser) []SearchItem {
 	})

 	// Return Results
-	return allEntries
+	return allEntries, nil
 }

-func parseLibGenDownloadURL(body io.ReadCloser) string {
+func parseLibGenDownloadURL(body io.ReadCloser) (string, error) {
 	// Parse
 	defer body.Close()
 	doc, _ := goquery.NewDocumentFromReader(body)

 	// Return Download URL
 	// downloadURL, _ := doc.Find("#download [href*=cloudflare]").Attr("href")
-	downloadURL, _ := doc.Find("#download h2 a").Attr("href")
+	downloadURL, exists := doc.Find("#download h2 a").Attr("href")
+	if exists == false {
+		return "", errors.New("Download URL not found")
+	}

-	return downloadURL
+	return downloadURL, nil
 }

-func parseGoodReads(body io.ReadCloser) []SearchItem {
+func parseGoodReads(body io.ReadCloser) ([]SearchItem, error) {
 	// Parse
 	defer body.Close()
-	doc, _ := goquery.NewDocumentFromReader(body)
+	doc, err := goquery.NewDocumentFromReader(body)
+	if err != nil {
+		return nil, err
+	}

 	// Normalize Results
 	var allEntries []SearchItem
```
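The Attr change above works because goquery's `Attr` returns a second boolean reporting whether the attribute was present at all; checking it is what turns a silently empty download URL into an explicit error. A small sketch of that behaviour against made-up markup; the HTML snippet and `main` wrapper are illustrative only:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Made-up stand-in for a LibGen info page.
	page := `<div id="download"><h2><a href="/get?md5=abc123">GET</a></h2></div>`

	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		panic(err)
	}

	// Attr reports presence via its second return value; a missing selector
	// or attribute yields ("", false) rather than an error.
	if href, exists := doc.Find("#download h2 a").Attr("href"); exists {
		fmt.Println("download URL:", href)
	} else {
		fmt.Println("download URL not found")
	}
}
```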
```diff
@@ -249,5 +290,5 @@ func parseGoodReads(body io.ReadCloser) []SearchItem {
 	})

 	// Return Results
-	return allEntries
+	return allEntries, nil
 }
```