commit 0c5b836ba24511aa76699bd7d8071c45fe2943f2 Author: Evan Reichard Date: Mon Sep 18 19:57:18 2023 -0400 Initial Commit diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..1d953f4 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use nix diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9c45321 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +.DS_Store +data/ +.direnv/ diff --git a/.sqlfluff b/.sqlfluff new file mode 100644 index 0000000..d12d5ec --- /dev/null +++ b/.sqlfluff @@ -0,0 +1,2 @@ +[sqlfluff] +dialect = sqlite diff --git a/API.md b/API.md new file mode 100644 index 0000000..937f0ec --- /dev/null +++ b/API.md @@ -0,0 +1,13 @@ +# API + +## Original Endpoints + +POST /users/create +GET /users/auth +GET /syncs/progress/:document +PUT /syncs/progress + +## New Endpoints + +GET /syncs/activity +POST /syncs/activity diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..24ea25b --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +# FROM golang:1.20-alpine AS build +FROM alpine:edge AS build +RUN apk add --no-cache --update go gcc g++ +WORKDIR /app +COPY . /app +RUN go mod download +RUN CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /sync-ninja cmd/main.go + +FROM alpine:3.18 +COPY --from=build /sync-ninja /sync-ninja +EXPOSE 8585 +ENTRYPOINT ["/sync-ninja", "serve"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..4522ba0 --- /dev/null +++ b/LICENSE @@ -0,0 +1,339 @@ +GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. 
+ + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. 
+ + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. 
+ +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. 
If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + {{description}} + Copyright (C) {{year}} {{fullname}} + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + {signature of Ty Coon}, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..aceb130 --- /dev/null +++ b/Makefile @@ -0,0 +1,12 @@ +docker_build_local: + docker build -t sync-ninja:latest . + +docker_build_release_beta: + docker buildx build \ + --platform linux/amd64,linux/arm64 \ + -t gitea.va.reichard.io/reichard/sync-ninja:beta --push . + +docker_build_release_latest: + docker buildx build \ + --platform linux/amd64,linux/arm64 \ + -t gitea.va.reichard.io/reichard/sync-ninja:latest --push . diff --git a/README.md b/README.md new file mode 100644 index 0000000..9b33469 --- /dev/null +++ b/README.md @@ -0,0 +1,46 @@ +# Book Manager + +


+
+---
+
+This is BookManager! It will probably be renamed at some point. This repository contains:
+
+- [KOReader KOSync](https://github.com/koreader/koreader-sync-server) Compatible API
+- KOReader Plugin (See `client` subfolder)
+- WebApp
+
+In addition to the compatible KOSync APIs, we add:
+
+- Additional APIs to automatically upload reading statistics
+- Automatic document uploads to the server (downloadable in the "Documents" view)
+- Automatic book cover metadata scraping (Thanks [OpenLibrary](https://openlibrary.org/))
+
+# Development
+
+SQLC Generation:
+
+```
+go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
+~/go/bin/sqlc generate
+```
+
+Run Development:
+
+```
+CONFIG_PATH=./data DATA_PATH=./data go run cmd/main.go serve
+```
+
+## Notes
+
+- Icons: https://www.svgrepo.com/collection/solar-bold-icons
diff --git a/api/api.go b/api/api.go
new file mode 100644
index 0000000..5e20776
--- /dev/null
+++ b/api/api.go
@@ -0,0 +1,137 @@
+package api
+
+import (
+    "crypto/rand"
+    "html/template"
+    "net/http"
+
+    "github.com/gin-contrib/multitemplate"
+    "github.com/gin-contrib/sessions"
+    "github.com/gin-contrib/sessions/cookie"
+    "github.com/gin-gonic/gin"
+    "reichard.io/bbank/config"
+    "reichard.io/bbank/database"
+    "reichard.io/bbank/graph"
+)
+
+type API struct {
+    Router *gin.Engine
+    Config *config.Config
+    DB     *database.DBManager
+}
+
+func NewApi(db *database.DBManager, c *config.Config) *API {
+    api := &API{
+        Router: gin.Default(),
+        Config: c,
+        DB:     db,
+    }
+
+    // Assets & Web App Templates
+    api.Router.Static("/assets", "./assets")
+
+    // Generate Secure Token
+    newToken, err := generateToken(64)
+    if err != nil {
+        panic("Unable to generate secure token")
+    }
+
+    // Configure Cookie Session Store
+    store := cookie.NewStore(newToken)
+    store.Options(sessions.Options{
+        MaxAge:   60 * 60 * 24,
+        Secure:   true,
+        HttpOnly: true,
+        SameSite: http.SameSiteStrictMode,
+    })
+    api.Router.Use(sessions.Sessions("token", store))
+
+    // Register Web App Routes
+    api.registerWebAppRoutes()
+
+    // Register API Routes
+    apiGroup := api.Router.Group("/api")
+    api.registerKOAPIRoutes(apiGroup)
+    api.registerWebAPIRoutes(apiGroup)
+
+    return api
+}
+
+func (api *API) registerWebAppRoutes() {
+    // Define Templates & Helper Functions
+    render := multitemplate.NewRenderer()
+    helperFuncs := template.FuncMap{
+        "GetSVGGraphData": graph.GetSVGGraphData,
+    }
+
+    render.AddFromFilesFuncs("login", helperFuncs, "templates/login.html")
+    render.AddFromFilesFuncs("home", helperFuncs, "templates/base.html", "templates/home.html")
+    render.AddFromFilesFuncs("graphs", helperFuncs, "templates/base.html", "templates/graphs.html")
+    render.AddFromFilesFuncs("activity", helperFuncs, "templates/base.html", "templates/activity.html")
+    render.AddFromFilesFuncs("documents", helperFuncs, "templates/base.html", "templates/documents.html")
+
+    api.Router.HTMLRender = render
+
+    api.Router.GET("/login", api.createAppResourcesRoute("login"))
+    api.Router.GET("/register", api.createAppResourcesRoute("login", gin.H{"Register": true}))
+    api.Router.GET("/logout", api.authWebAppMiddleware, api.authLogout)
+    api.Router.POST("/login", api.authFormLogin)
+    api.Router.POST("/register", api.authFormRegister)
+
+    api.Router.GET("/", api.authWebAppMiddleware, api.createAppResourcesRoute("home"))
+    api.Router.GET("/documents", api.authWebAppMiddleware, api.createAppResourcesRoute("documents"))
+    api.Router.GET("/documents/:document/file", api.authWebAppMiddleware, api.downloadDocumentFile)
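+    // Covers are resolved lazily: getDocumentCover scrapes OpenLibrary on the
+    // first request and caches the image under DataPath/covers (see app-routes.go).
+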
api.Router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.getDocumentCover) + + // TODO + api.Router.GET("/activity", api.authWebAppMiddleware, baseResourceRoute("activity")) + api.Router.GET("/graphs", api.authWebAppMiddleware, baseResourceRoute("graphs")) + +} + +func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) { + koGroup := apiGroup.Group("/ko") + + koGroup.GET("/info", api.serverInfo) + + koGroup.POST("/users/create", api.createUser) + koGroup.GET("/users/auth", api.authAPIMiddleware, api.authorizeUser) + + koGroup.PUT("/syncs/progress", api.authAPIMiddleware, api.setProgress) + koGroup.GET("/syncs/progress/:document", api.authAPIMiddleware, api.getProgress) + + koGroup.POST("/documents", api.authAPIMiddleware, api.addDocuments) + koGroup.POST("/syncs/documents", api.authAPIMiddleware, api.checkDocumentsSync) + koGroup.PUT("/documents/:document/file", api.authAPIMiddleware, api.uploadDocumentFile) + koGroup.GET("/documents/:document/file", api.authAPIMiddleware, api.downloadDocumentFile) + + koGroup.POST("/activity", api.authAPIMiddleware, api.addActivities) + koGroup.POST("/syncs/activity", api.authAPIMiddleware, api.checkActivitySync) +} + +func (api *API) registerWebAPIRoutes(apiGroup *gin.RouterGroup) { + v1Group := apiGroup.Group("/v1") + + v1Group.GET("/info", api.serverInfo) + + v1Group.POST("/users", api.createUser) + v1Group.GET("/users", api.authAPIMiddleware, api.getUsers) + + v1Group.POST("/documents", api.authAPIMiddleware, api.checkDocumentsSync) + v1Group.GET("/documents", api.authAPIMiddleware, api.getDocuments) + + v1Group.GET("/documents/:document/file", api.authAPIMiddleware, api.downloadDocumentFile) + v1Group.PUT("/documents/:document/file", api.authAPIMiddleware, api.uploadDocumentFile) + + v1Group.GET("/activity", api.authAPIMiddleware, api.getActivity) + v1Group.GET("/devices", api.authAPIMiddleware, api.getDevices) +} + +func generateToken(n int) ([]byte, error) { + b := make([]byte, n) + _, err := rand.Read(b) + if err != nil { + return nil, err + } + return b, nil +} diff --git a/api/app-routes.go b/api/app-routes.go new file mode 100644 index 0000000..5aedab8 --- /dev/null +++ b/api/app-routes.go @@ -0,0 +1,169 @@ +package api + +import ( + "fmt" + "net/http" + "os" + "path/filepath" + + "github.com/gin-gonic/gin" + log "github.com/sirupsen/logrus" + "reichard.io/bbank/database" + "reichard.io/bbank/metadata" +) + +func baseResourceRoute(template string, args ...map[string]any) func(c *gin.Context) { + variables := gin.H{"RouteName": template} + if len(args) > 0 { + variables = args[0] + } + + return func(c *gin.Context) { + rUser, _ := c.Get("AuthorizedUser") + variables["User"] = rUser + c.HTML(http.StatusOK, template, variables) + } +} + +func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any) func(*gin.Context) { + // Merge Optional Template Data + var templateVars = gin.H{} + if len(args) > 0 { + templateVars = args[0] + } + templateVars["RouteName"] = routeName + + return func(c *gin.Context) { + rUser, _ := c.Get("AuthorizedUser") + qParams := bindQueryParams(c) + templateVars["User"] = rUser + + if routeName == "documents" { + documents, err := api.DB.Queries.GetDocumentsWithStats(api.DB.Ctx, database.GetDocumentsWithStatsParams{ + UserID: rUser.(string), + Offset: (*qParams.Page - 1) * *qParams.Limit, + Limit: *qParams.Limit, + }) + if err != nil { + log.Info(err) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) + return + } + + templateVars["Data"] = 
documents
+    } else if routeName == "home" {
+        weeklyStreak, _ := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
+            UserID: rUser.(string),
+            Window: "WEEK",
+        })
+
+        dailyStreak, _ := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
+            UserID: rUser.(string),
+            Window: "DAY",
+        })
+
+        databaseInfo, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, rUser.(string))
+        readGraphData, err := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, rUser.(string))
+        if err != nil {
+            log.Error("GetDailyReadStats Error:", err)
+        }
+
+        templateVars["Data"] = gin.H{
+            "DailyStreak":  dailyStreak,
+            "WeeklyStreak": weeklyStreak,
+            "DatabaseInfo": databaseInfo,
+            "GraphData":    readGraphData,
+        }
+    }
+
+    c.HTML(http.StatusOK, routeName, templateVars)
+    }
+}
+
+func (api *API) getDocumentCover(c *gin.Context) {
+    var rDoc requestDocumentID
+    if err := c.ShouldBindUri(&rDoc); err != nil {
+        c.AbortWithStatus(http.StatusBadRequest)
+        return
+    }
+
+    // Validate Document Exists in DB
+    document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
+    if err != nil {
+        c.AbortWithStatus(http.StatusBadRequest)
+        return
+    }
+
+    // Handle Identified Document
+    if document.Olid != nil {
+        if *document.Olid == "UNKNOWN" {
+            c.Redirect(http.StatusFound, "/assets/no-cover.jpg")
+            return
+        }
+
+        // Derive Path
+        fileName := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", *document.Olid))
+        safePath := filepath.Join(api.Config.DataPath, "covers", fileName)
+
+        // Validate File Exists
+        _, err = os.Stat(safePath)
+        if err != nil {
+            c.Redirect(http.StatusFound, "/assets/no-cover.jpg")
+            return
+        }
+
+        c.File(safePath)
+        return
+    }
+
+    /*
+       This is a bit convoluted because we want to ensure we set the OLID to
+       UNKNOWN if there are any errors. This will ideally prevent us from
+       hitting the OpenLibrary API multiple times in the future.
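+
+       For example: if the OpenLibrary lookup fails on the first request, the
+       OLID is persisted as UNKNOWN and every subsequent request short-circuits
+       to /assets/no-cover.jpg instead of retrying the API.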
+ */ + + var coverID string = "UNKNOWN" + var coverFilePath *string + + // Identify Documents & Save Covers + coverIDs, err := metadata.GetCoverIDs(document.Title, document.Author) + if err == nil && len(coverIDs) > 0 { + coverFilePath, err = metadata.DownloadAndSaveCover(coverIDs[0], api.Config.DataPath) + if err == nil { + coverID = coverIDs[0] + } + } + + // Upsert Document + if _, err = api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{ + ID: document.ID, + Olid: &coverID, + }); err != nil { + log.Error("Document Upsert Error") + } + + // Return Unknown Cover + if coverID == "UNKNOWN" { + c.Redirect(http.StatusFound, "/assets/no-cover.jpg") + return + } + + c.File(*coverFilePath) +} + +/* +METADATA: + - Metadata Match + - Update Metadata +*/ + +/* +GRAPHS: + - Streaks (Daily, Weekly, Monthly) + - Last Week Activity (Daily - Pages & Time) + + + - Pages Read (Daily, Weekly, Monthly) + - Reading Progress + - Average Reading Time (Daily, Weekly, Monthly) +*/ diff --git a/api/auth.go b/api/auth.go new file mode 100644 index 0000000..86107e2 --- /dev/null +++ b/api/auth.go @@ -0,0 +1,167 @@ +package api + +import ( + "crypto/md5" + "fmt" + "net/http" + "strings" + + argon2 "github.com/alexedwards/argon2id" + "github.com/gin-contrib/sessions" + "github.com/gin-gonic/gin" + "reichard.io/bbank/database" +) + +type authHeader struct { + AuthUser string `header:"x-auth-user"` + AuthKey string `header:"x-auth-key"` +} + +func (api *API) authorizeCredentials(username string, password string) (authorized bool) { + user, err := api.DB.Queries.GetUser(api.DB.Ctx, username) + if err != nil { + return false + } + + if match, err := argon2.ComparePasswordAndHash(password, user.Pass); err != nil || match != true { + return false + } + + return true +} + +func (api *API) authAPIMiddleware(c *gin.Context) { + session := sessions.Default(c) + + // Utilize Session Token + if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil { + c.Set("AuthorizedUser", authorizedUser) + c.Next() + return + } + + var rHeader authHeader + if err := c.ShouldBindHeader(&rHeader); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"}) + return + } + if rHeader.AuthUser == "" || rHeader.AuthKey == "" { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization Headers"}) + return + } + + if authorized := api.authorizeCredentials(rHeader.AuthUser, rHeader.AuthKey); authorized != true { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"}) + return + } + + // Set Session Cookie + session.Set("authorizedUser", rHeader.AuthUser) + session.Save() + + c.Set("AuthorizedUser", rHeader.AuthUser) + c.Next() +} + +func (api *API) authWebAppMiddleware(c *gin.Context) { + session := sessions.Default(c) + + // Utilize Session Token + if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil { + c.Set("AuthorizedUser", authorizedUser) + c.Next() + return + } + + c.Redirect(http.StatusFound, "/login") + c.Abort() +} + +func (api *API) authFormLogin(c *gin.Context) { + username := strings.TrimSpace(c.PostForm("username")) + rawPassword := strings.TrimSpace(c.PostForm("password")) + + if username == "" || rawPassword == "" { + c.HTML(http.StatusUnauthorized, "login", gin.H{ + "Error": "Invalid Credentials", + }) + return + } + password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword))) + + if authorized := api.authorizeCredentials(username, password); authorized != true { + 
c.HTML(http.StatusUnauthorized, "login", gin.H{ + "Error": "Invalid Credentials", + }) + return + } + + session := sessions.Default(c) + + // Set Session Cookie + session.Set("authorizedUser", username) + session.Save() + + c.Redirect(http.StatusFound, "/") +} + +func (api *API) authLogout(c *gin.Context) { + session := sessions.Default(c) + session.Clear() + session.Save() + c.Redirect(http.StatusFound, "/login") +} + +func (api *API) authFormRegister(c *gin.Context) { + username := strings.TrimSpace(c.PostForm("username")) + rawPassword := strings.TrimSpace(c.PostForm("password")) + + if username == "" || rawPassword == "" { + c.HTML(http.StatusBadRequest, "login", gin.H{ + "Register": true, + "Error": "Registration Disabled or User Already Exists", + }) + return + } + password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword))) + + hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams) + if err != nil { + c.HTML(http.StatusBadRequest, "login", gin.H{ + "Register": true, + "Error": "Registration Disabled or User Already Exists", + }) + return + } + + rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{ + ID: username, + Pass: hashedPassword, + }) + + // SQL Error + if err != nil { + c.HTML(http.StatusBadRequest, "login", gin.H{ + "Register": true, + "Error": "Registration Disabled or User Already Exists", + }) + return + } + + // User Already Exists + if rows == 0 { + c.HTML(http.StatusBadRequest, "login", gin.H{ + "Register": true, + "Error": "Registration Disabled or User Already Exists", + }) + return + } + + session := sessions.Default(c) + + // Set Session Cookie + session.Set("authorizedUser", username) + session.Save() + + c.Redirect(http.StatusFound, "/") +} diff --git a/api/ko-routes.go b/api/ko-routes.go new file mode 100644 index 0000000..0d5f3be --- /dev/null +++ b/api/ko-routes.go @@ -0,0 +1,593 @@ +package api + +import ( + "crypto/md5" + "database/sql" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "time" + + argon2 "github.com/alexedwards/argon2id" + "github.com/gabriel-vasile/mimetype" + "github.com/gin-gonic/gin" + log "github.com/sirupsen/logrus" + "golang.org/x/exp/slices" + "reichard.io/bbank/database" +) + +type activityItem struct { + DocumentID string `json:"document"` + StartTime int64 `json:"start_time"` + Duration int64 `json:"duration"` + CurrentPage int64 `json:"current_page"` + TotalPages int64 `json:"total_pages"` +} + +type requestActivity struct { + DeviceID string `json:"device_id"` + Device string `json:"device"` + Activity []activityItem `json:"activity"` +} + +type requestCheckActivitySync struct { + DeviceID string `json:"device_id"` +} + +type requestDocument struct { + Documents []database.Document `json:"documents"` +} + +type requestPosition struct { + DocumentID string `json:"document"` + Percentage float64 `json:"percentage"` + Progress string `json:"progress"` + Device string `json:"device"` + DeviceID string `json:"device_id"` +} + +type requestUser struct { + Username string `json:"username"` + Password string `json:"password"` +} + +type requestCheckDocumentSync struct { + DeviceID string `json:"device_id"` + Device string `json:"device"` + Have []string `json:"have"` +} + +type responseCheckDocumentSync struct { + Want []string `json:"want"` + Give []database.Document `json:"give"` + Delete []string `json:"deleted"` +} + +type requestDocumentID struct { + DocumentID string `uri:"document" binding:"required"` +} + +var allowedExtensions []string = []string{".epub", ".html"} 
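+
+// Illustrative document-sync exchange encoded by the types above (IDs are
+// hypothetical). A device POSTs /api/ko/syncs/documents:
+//
+//     {"device_id": "dev-1", "device": "kobo", "have": ["aaa111"]}
+//
+// and receives roughly:
+//
+//     {"want": ["aaa111"], "give": [{"id": "bbb222", "title": "..."}], "deleted": ["ccc333"]}
+//
+// "want" lists IDs whose files the server still wants uploaded, "give" carries
+// metadata for documents the device is missing, and "deleted" lists IDs removed
+// server-side (see checkDocumentsSync below).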
+ +func (api *API) authorizeUser(c *gin.Context) { + c.JSON(200, gin.H{ + "authorized": "OK", + }) +} + +func (api *API) createUser(c *gin.Context) { + var rUser requestUser + if err := c.ShouldBindJSON(&rUser); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"}) + return + } + + if rUser.Username == "" || rUser.Password == "" { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"}) + return + } + + hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"}) + return + } + + // TODO - Initial User is Admin & Enable / Disable Registration + rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{ + ID: rUser.Username, + Pass: hashedPassword, + }) + + // SQL Error + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"}) + return + } + + // User Exists (ON CONFLICT DO NOTHING) + if rows == 0 { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "User Already Exists"}) + return + } + + // TODO: Struct -> JSON + c.JSON(http.StatusCreated, gin.H{ + "username": rUser.Username, + }) +} + +func (api *API) setProgress(c *gin.Context) { + rUser, _ := c.Get("AuthorizedUser") + + var rPosition requestPosition + if err := c.ShouldBindJSON(&rPosition); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Progress Data"}) + return + } + + // Upsert Device + device, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{ + ID: rPosition.DeviceID, + UserID: rUser.(string), + DeviceName: rPosition.Device, + }) + if err != nil { + log.Error("Device Upsert Error:", device, err) + } + + // Upsert Document + document, err := api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{ + ID: rPosition.DocumentID, + }) + if err != nil { + log.Error("Document Upsert Error:", document, err) + } + + // Create or Replace Progress + progress, err := api.DB.Queries.UpdateProgress(api.DB.Ctx, database.UpdateProgressParams{ + Percentage: rPosition.Percentage, + DocumentID: rPosition.DocumentID, + DeviceID: rPosition.DeviceID, + UserID: rUser.(string), + Progress: rPosition.Progress, + }) + if err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) + return + } + + // TODO: Struct -> JSON + c.JSON(http.StatusOK, gin.H{ + "document": progress.DocumentID, + "timestamp": progress.CreatedAt, + }) +} + +func (api *API) getProgress(c *gin.Context) { + rUser, _ := c.Get("AuthorizedUser") + + var rDocID requestDocumentID + if err := c.ShouldBindUri(&rDocID); err != nil { + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) + return + } + + progress, err := api.DB.Queries.GetProgress(api.DB.Ctx, database.GetProgressParams{ + DocumentID: rDocID.DocumentID, + UserID: rUser.(string), + }) + + if err != nil { + log.Error("Invalid Progress:", progress, err) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"}) + return + } + + // TODO: Struct -> JSON + c.JSON(http.StatusOK, gin.H{ + "document": progress.DocumentID, + "percentage": progress.Percentage, + "progress": progress.Progress, + "device": progress.DeviceName, + "device_id": progress.DeviceID, + }) +} + +func (api *API) addActivities(c *gin.Context) { + rUser, _ := c.Get("AuthorizedUser") + + var rActivity requestActivity + if err := 
c.ShouldBindJSON(&rActivity); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})
+        return
+    }
+
+    // Do Transaction
+    tx, err := api.DB.DB.Begin()
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+        return
+    }
+
+    // Derive Unique Documents
+    allDocumentsMap := make(map[string]bool)
+    for _, item := range rActivity.Activity {
+        allDocumentsMap[item.DocumentID] = true
+    }
+    allDocuments := getKeys(allDocumentsMap)
+
+    // Defer & Start Transaction
+    defer tx.Rollback()
+    qtx := api.DB.Queries.WithTx(tx)
+
+    // Upsert Documents
+    for _, doc := range allDocuments {
+        _, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+            ID: doc,
+        })
+
+        if err != nil {
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+            return
+        }
+    }
+
+    // Upsert Device
+    _, err = qtx.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
+        ID:         rActivity.DeviceID,
+        UserID:     rUser.(string),
+        DeviceName: rActivity.Device,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
+        return
+    }
+
+    // Add All Activity
+    for _, item := range rActivity.Activity {
+        _, err := qtx.AddActivity(api.DB.Ctx, database.AddActivityParams{
+            UserID:      rUser.(string),
+            DocumentID:  item.DocumentID,
+            DeviceID:    rActivity.DeviceID,
+            StartTime:   time.Unix(item.StartTime, 0).UTC(),
+            Duration:    item.Duration,
+            CurrentPage: item.CurrentPage,
+            TotalPages:  item.TotalPages,
+        })
+
+        if err != nil {
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})
+            return
+        }
+    }
+
+    // Commit Transaction
+    if err := tx.Commit(); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+        return
+    }
+
+    c.JSON(http.StatusOK, gin.H{
+        "added": len(rActivity.Activity),
+    })
+}
+
+func (api *API) checkActivitySync(c *gin.Context) {
+    rUser, _ := c.Get("AuthorizedUser")
+
+    var rCheckActivity requestCheckActivitySync
+    if err := c.ShouldBindJSON(&rCheckActivity); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    // Get Last Device Activity
+    lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{
+        UserID:   rUser.(string),
+        DeviceID: rCheckActivity.DeviceID,
+    })
+    if err == sql.ErrNoRows {
+        lastActivity = time.UnixMilli(0)
+    } else if err != nil {
+        log.Error("GetLastActivity Error:", err)
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+        return
+    }
+
+    c.JSON(http.StatusOK, gin.H{
+        "last_sync": lastActivity.Unix(),
+    })
+}
+
+func (api *API) addDocuments(c *gin.Context) {
+    var rNewDocs requestDocument
+    if err := c.ShouldBindJSON(&rNewDocs); err != nil {
+        log.Error("[addDocuments] Invalid JSON Bind")
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document(s)"})
+        return
+    }
+
+    // Do Transaction
+    tx, err := api.DB.DB.Begin()
+    if err != nil {
+        log.Error("[addDocuments] Unknown Transaction Error")
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+        return
+    }
+
+    // Defer & Start Transaction
+    defer tx.Rollback()
+    qtx := api.DB.Queries.WithTx(tx)
+
+    // Upsert Documents
+    for _, doc := range rNewDocs.Documents {
+        doc, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+            ID:          doc.ID,
+            Title:       doc.Title,
+            Author:      doc.Author,
+            Series:      doc.Series,
+            SeriesIndex: doc.SeriesIndex,
+            Lang:        doc.Lang,
+            Description: doc.Description,
+        })
+        if err != nil {
+            log.Error("[addDocuments] UpsertDocument Error:", err)
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+            return
+        }
+
+        _, err = qtx.UpdateDocumentSync(api.DB.Ctx, database.UpdateDocumentSyncParams{
+            ID:     doc.ID,
+            Synced: true,
+        })
+        if err != nil {
+            log.Error("[addDocuments] UpdateDocumentSync Error:", err)
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+            return
+        }
+    }
+
+    // Commit Transaction
+    if err := tx.Commit(); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+        return
+    }
+
+    c.JSON(http.StatusOK, gin.H{
+        "changed": len(rNewDocs.Documents),
+    })
+}
+
+func (api *API) checkDocumentsSync(c *gin.Context) {
+    rUser, _ := c.Get("AuthorizedUser")
+
+    var rCheckDocs requestCheckDocumentSync
+    if err := c.ShouldBindJSON(&rCheckDocs); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    // Upsert Device
+    device, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
+        ID:         rCheckDocs.DeviceID,
+        UserID:     rUser.(string),
+        DeviceName: rCheckDocs.Device,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
+        return
+    }
+
+    missingDocs := []database.Document{}
+    deletedDocIDs := []string{}
+
+    if device.Sync == true {
+        // Get Missing Documents
+        missingDocs, err = api.DB.Queries.GetMissingDocuments(api.DB.Ctx, rCheckDocs.Have)
+        if err != nil {
+            log.Error("GetMissingDocuments Error:", err)
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+            return
+        }
+
+        // Get Deleted Documents
+        deletedDocIDs, err = api.DB.Queries.GetDeletedDocuments(api.DB.Ctx, rCheckDocs.Have)
+        if err != nil {
+            log.Error("GetDeletedDocuments Error:", err)
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+            return
+        }
+    }
+
+    // Get Wanted Documents
+    jsonHaves, err := json.Marshal(rCheckDocs.Have)
+    if err != nil {
+        log.Error("JSON Marshal Error:", err)
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    wantedDocIDs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
+    if err != nil {
+        log.Error("GetWantedDocuments Error:", err)
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    rCheckDocSync := responseCheckDocumentSync{
+        Delete: []string{},
+        Want:   []string{},
+        Give:   []database.Document{},
+    }
+
+    // Ensure Empty Array
+    if wantedDocIDs != nil {
+        rCheckDocSync.Want = wantedDocIDs
+    }
+    if missingDocs != nil {
+        rCheckDocSync.Give = missingDocs
+    }
+    if deletedDocIDs != nil {
+        rCheckDocSync.Delete = deletedDocIDs
+    }
+
+    c.JSON(http.StatusOK, rCheckDocSync)
+}
+
+func (api *API) uploadDocumentFile(c *gin.Context) {
+    var rDoc requestDocumentID
+    if err := c.ShouldBindUri(&rDoc); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    fileData, err := c.FormFile("file")
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+        return
+    }
+
+    // Validate Type & Derive Extension on MIME
+    uploadedFile, err := fileData.Open()
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+        return
+    }
+    fileMime, err := mimetype.DetectReader(uploadedFile)
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+        return
+    }
+    fileExtension := fileMime.Extension()
+
+    if !slices.Contains(allowedExtensions, fileExtension) {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Filetype"})
+        return
+    }
+
+    // Validate Document Exists in DB
+    document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
+        return
+    }
+
+    // Derive Filename
+    var fileName string
+    if document.Author != nil {
+        fileName = fileName + *document.Author
+    } else {
+        fileName = fileName + "Unknown"
+    }
+
+    if document.Title != nil {
+        fileName = fileName + " - " + *document.Title
+    } else {
+        fileName = fileName + " - Unknown"
+    }
+
+    // Derive & Sanitize File Name
+    fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
+
+    // Generate Storage Path
+    safePath := filepath.Join(api.Config.DataPath, "documents", fileName)
+
+    // Save & Prevent Overwrites
+    _, err = os.Stat(safePath)
+    if os.IsNotExist(err) {
+        err = c.SaveUploadedFile(fileData, safePath)
+        if err != nil {
+            c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+            return
+        }
+    }
+
+    // Get MD5 Hash
+    fileHash, err := getFileMD5(safePath)
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+        return
+    }
+
+    // Upsert Document
+    _, err = api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+        ID:       document.ID,
+        Md5:      fileHash,
+        Filepath: &fileName,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Error"})
+        return
+    }
+
+    // Update Document Sync Attribute
+    _, err = api.DB.Queries.UpdateDocumentSync(api.DB.Ctx, database.UpdateDocumentSyncParams{
+        ID:     document.ID,
+        Synced: true,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+        return
+    }
+
+    c.JSON(http.StatusOK, gin.H{
+        "status": "ok",
+    })
+}
+
+func (api *API) downloadDocumentFile(c *gin.Context) {
+    var rDoc requestDocumentID
+    if err := c.ShouldBindUri(&rDoc); err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    // Get Document
+    document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
+        return
+    }
+
+    if document.Filepath == nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exist"})
+        return
+    }
+
+    // Derive Storage Location
+    filePath := filepath.Join(api.Config.DataPath, "documents", *document.Filepath)
+
+    // Validate File Exists
+    _, err = os.Stat(filePath)
+    if os.IsNotExist(err) {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exist"})
+        return
+    }
+
+    // Force Download (Security) - quote the filename, which may contain spaces
+    c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base(*document.Filepath)))
+    c.File(filePath)
+}
+
+func getKeys[M ~map[K]V, K comparable, V any](m M) []K {
+    r := make([]K, 0, len(m))
+    for k := range m {
+        r = append(r, k)
+    }
+    return r
+}
+
+func getFileMD5(filePath string) (*string, error) {
+    file, err := os.Open(filePath)
+    if err != nil {
+        return nil, err
+    }
+
+    defer file.Close()
+
+    hash := md5.New()
+    _, err = io.Copy(hash, file)
+    if err != nil {
+        return nil, err
+    }
+
+    fileHash := fmt.Sprintf("%x", hash.Sum(nil))
+
+    return &fileHash, nil
+}
diff --git a/api/web-routes.go b/api/web-routes.go
new file mode 100644
index 0000000..4f7f4e1
--- /dev/null
+++ b/api/web-routes.go
@@ -0,0 +1,163 @@
+package api
+
+import (
+    "net/http"
+
+    argon2 "github.com/alexedwards/argon2id"
+    "github.com/gin-gonic/gin"
+    "reichard.io/bbank/database"
+)
+
+type infoResponse struct {
+    Authorized bool   `json:"authorized"`
+    Version    string `json:"version"`
+}
+
+type queryParams struct {
+    Page     *int64  `form:"page"`
+    Limit    *int64  `form:"limit"`
+    Document *string `form:"document"`
+}
+
+func bindQueryParams(c *gin.Context) queryParams {
+    var qParams queryParams
+    c.BindQuery(&qParams)
+
+    if qParams.Limit == nil {
+        var defaultValue int64 = 50
+        qParams.Limit = &defaultValue
+    } else if *qParams.Limit < 0 {
+        var zeroValue int64 = 0
+        qParams.Limit = &zeroValue
+    }
+
+    if qParams.Page == nil || *qParams.Page < 1 {
+        // Default to the first page (a zero value would yield a negative offset)
+        var oneValue int64 = 1
+        qParams.Page = &oneValue
+    }
+
+    return qParams
+}
+
+func (api *API) serverInfo(c *gin.Context) {
+    respData := infoResponse{
+        Authorized: false,
+        Version:    api.Config.Version,
+    }
+
+    var rHeader authHeader
+    if err := c.ShouldBindHeader(&rHeader); err != nil {
+        c.JSON(200, respData)
+        return
+    }
+    if rHeader.AuthUser == "" || rHeader.AuthKey == "" {
+        c.JSON(200, respData)
+        return
+    }
+
+    user, err := api.DB.Queries.GetUser(api.DB.Ctx, rHeader.AuthUser)
+    if err != nil {
+        c.JSON(200, respData)
+        return
+    }
+
+    match, err := argon2.ComparePasswordAndHash(rHeader.AuthKey, user.Pass)
+    if err != nil || match != true {
+        c.JSON(200, respData)
+        return
+    }
+
+    respData.Authorized = true
+    c.JSON(200, respData)
+}
+
+func (api *API) getDocuments(c *gin.Context) {
+    qParams := bindQueryParams(c)
+
+    documents, err := api.DB.Queries.GetDocuments(api.DB.Ctx, database.GetDocumentsParams{
+        Offset: (*qParams.Page - 1) * *qParams.Limit,
+        Limit:  *qParams.Limit,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    if documents == nil {
+        documents = []database.Document{}
+    }
+
+    c.JSON(http.StatusOK, documents)
+}
+
+func (api *API) getUsers(c *gin.Context) {
+    rUser, _ := c.Get("AuthorizedUser")
+    qParams := bindQueryParams(c)
+
+    users, err := api.DB.Queries.GetUsers(api.DB.Ctx, database.GetUsersParams{
+        User:   rUser.(string),
+        Offset: (*qParams.Page - 1) * *qParams.Limit,
+        Limit:  *qParams.Limit,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    if users == nil {
+        users = []database.User{}
+    }
+
+    c.JSON(http.StatusOK, users)
+}
+
+func (api *API) getActivity(c *gin.Context) {
+    rUser, _ := c.Get("AuthorizedUser")
+    qParams := bindQueryParams(c)
+
+    dbActivityParams := database.GetActivityParams{
+        UserID:     rUser.(string),
+        DocFilter:  false,
+        DocumentID: "",
+        Offset:     (*qParams.Page - 1) * *qParams.Limit,
+        Limit:      *qParams.Limit,
+    }
+
+    if qParams.Document != nil {
+        dbActivityParams.DocFilter = true
+        dbActivityParams.DocumentID = *qParams.Document
+    }
+
+    activity, err := api.DB.Queries.GetActivity(api.DB.Ctx, dbActivityParams)
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    if activity == nil {
+        activity = []database.Activity{}
+    }
+
+    c.JSON(http.StatusOK, activity)
+}
+
+func (api *API) getDevices(c *gin.Context) {
+    rUser, _ := c.Get("AuthorizedUser")
+    qParams := bindQueryParams(c)
+
+    devices, err := api.DB.Queries.GetDevices(api.DB.Ctx, database.GetDevicesParams{
+        UserID: rUser.(string),
+        Offset: (*qParams.Page - 1) * *qParams.Limit,
+        Limit:  *qParams.Limit,
+    })
+    if err != nil {
+        c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+        return
+    }
+
+    if devices == nil {
+        devices = []database.Device{}
+    }
+
+    c.JSON(http.StatusOK, devices)
+}
diff --git a/assets/book1.jpg b/assets/book1.jpg
new file mode 100644
index 0000000..1d6bee7
Binary files /dev/null and b/assets/book1.jpg differ
diff --git a/assets/book2.jpg b/assets/book2.jpg
new file mode 100644
index 0000000..391659a
Binary files /dev/null and b/assets/book2.jpg differ
diff --git a/assets/book3.jpg b/assets/book3.jpg
new file mode 100644
index 0000000..6f6962d
Binary files /dev/null and b/assets/book3.jpg differ
diff --git a/assets/book4.jpg b/assets/book4.jpg
new file mode 100644
index 0000000..2f736e7
Binary files /dev/null and b/assets/book4.jpg differ
diff --git a/assets/book55.jpg b/assets/book55.jpg
new file mode 100644
index 0000000..5505e5e
Binary files /dev/null and b/assets/book55.jpg differ
diff --git a/assets/no-cover.jpg b/assets/no-cover.jpg
new file mode 100644
index 0000000..556d117
Binary files /dev/null and b/assets/no-cover.jpg differ
diff --git a/client/syncninja.koplugin/SyncNinjaClient.lua b/client/syncninja.koplugin/SyncNinjaClient.lua
new file mode 100644
index 0000000..d80638b
--- /dev/null
+++ b/client/syncninja.koplugin/SyncNinjaClient.lua
@@ -0,0 +1,316 @@
+local UIManager = require("ui/uimanager")
+local socketutil = require("socketutil")
+local logger = require("logger")
+
+-- Push/Pull
+local SYNC_TIMEOUTS = {2, 5}
+
+-- Login/Register
+local AUTH_TIMEOUTS = {5, 10}
+
+local SyncNinjaClient = {service_spec = nil, custom_url = nil}
+
+function SyncNinjaClient:new(o)
+    if o == nil then o = {} end
+    setmetatable(o, self)
+    self.__index = self
+    if o.init then o:init() end
+    return o
+end
+
+function SyncNinjaClient:init()
+    local Spore = require("Spore")
+    self.client = Spore.new_from_spec(self.service_spec,
+                                      {base_url = self.custom_url})
+    package.loaded["Spore.Middleware.GinClient"] = {}
+    require("Spore.Middleware.GinClient").call = function(_, req)
+        req.headers["accept"] = "application/vnd.koreader.v1+json"
+    end
+    package.loaded["Spore.Middleware.SyncNinjaAuth"] = {}
+    require("Spore.Middleware.SyncNinjaAuth").call = function(args, req)
+        req.headers["x-auth-user"] = args.username
+        req.headers["x-auth-key"] = args.userkey
+    end
+    package.loaded["Spore.Middleware.AsyncHTTP"] = {}
+    require("Spore.Middleware.AsyncHTTP").call = function(args, req)
+        -- disable async http if Turbo looper is missing
+        if not UIManager.looper then return end
+        req:finalize()
+        local result
+
+        local turbo = require("turbo")
+        turbo.log.categories.success = false
+        turbo.log.categories.warning = false
+
+        local client = turbo.async.HTTPClient({verify_ca = false})
+        local res = coroutine.yield(client:fetch(req.url, {
+            url = req.url,
+            method = req.method,
+            body = req.env.spore.payload,
+            connect_timeout = 10,
+            request_timeout = 20,
+            on_headers = function(headers)
+                for header, value in pairs(req.headers) do
+                    if type(header) == "string" then
+                        headers:add(header, value)
+                    end
+                end
+            end
+        }))
+
+        return res
+
+        -- return coroutine.create(function() coroutine.yield(result) end)
+    end
+end
+
+------------------------------------------
+-------------- New Functions -------------
+------------------------------------------
+
+function SyncNinjaClient:check_activity(username, password, device_id, callback)
+    self.client:reset_middlewares()
+    self.client:enable("Format.JSON")
+    self.client:enable("GinClient")
+    self.client:enable("SyncNinjaAuth",
+                       {username = username, userkey = password})
+
+    socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
+    local co = coroutine.create(function()
+        local ok, res = pcall(function()
+function SyncNinjaClient:check_activity(username, password, device_id, callback)
+    self.client:reset_middlewares()
+    self.client:enable("Format.JSON")
+    self.client:enable("GinClient")
+    self.client:enable("SyncNinjaAuth",
+                       {username = username, userkey = password})
+
+    socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
+    local co = coroutine.create(function()
+        local ok, res = pcall(function()
+            return self.client:check_activity({device_id = device_id})
+        end)
+        if ok then
+            callback(res.status == 200, res.body)
+        else
+            logger.dbg("SyncNinjaClient:check_activity failure:", res)
+            callback(false, res.body)
+        end
+    end)
+    self.client:enable("AsyncHTTP", {thread = co})
+    coroutine.resume(co)
+    if UIManager.looper then UIManager:setInputTimeout() end
+    socketutil:reset_timeout()
+end
+
+function SyncNinjaClient:add_activity(username, password, device_id, device,
+                                      activity, callback)
+    self.client:reset_middlewares()
+    self.client:enable("Format.JSON")
+    self.client:enable("GinClient")
+    self.client:enable("SyncNinjaAuth",
+                       {username = username, userkey = password})
+
+    socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
+    local co = coroutine.create(function()
+        local ok, res = pcall(function()
+            return self.client:add_activity({
+                device_id = device_id,
+                device = device,
+                activity = activity
+            })
+        end)
+
+        if ok then
+            callback(res.status == 200, res.body)
+        else
+            logger.dbg("SyncNinjaClient:add_activity failure:", res)
+            callback(false, res.body)
+        end
+    end)
+    self.client:enable("AsyncHTTP", {thread = co})
+    coroutine.resume(co)
+    if UIManager.looper then UIManager:setInputTimeout() end
+    socketutil:reset_timeout()
+end
+
+function SyncNinjaClient:add_documents(username, password, documents, callback)
+    self.client:reset_middlewares()
+    self.client:enable("Format.JSON")
+    self.client:enable("GinClient")
+    self.client:enable("SyncNinjaAuth",
+                       {username = username, userkey = password})
+
+    socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
+    local co = coroutine.create(function()
+        local ok, res = pcall(function()
+            return self.client:add_documents({documents = documents})
+        end)
+        if ok then
+            callback(res.status == 200, res.body)
+        else
+            logger.dbg("SyncNinjaClient:add_documents failure:", res)
+            callback(false, res.body)
+        end
+    end)
+    self.client:enable("AsyncHTTP", {thread = co})
+    coroutine.resume(co)
+    if UIManager.looper then UIManager:setInputTimeout() end
+    socketutil:reset_timeout()
+end
+
+function SyncNinjaClient:check_documents(username, password, device_id, device,
+                                         have, callback)
+    self.client:reset_middlewares()
+    self.client:enable("Format.JSON")
+    self.client:enable("GinClient")
+    self.client:enable("SyncNinjaAuth",
+                       {username = username, userkey = password})
+
+    socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
+    local co = coroutine.create(function()
+        local ok, res = pcall(function()
+            return self.client:check_documents({
+                device_id = device_id,
+                device = device,
+                have = have
+            })
+        end)
+        if ok then
+            callback(res.status == 200, res.body)
+        else
+            logger.dbg("SyncNinjaClient:check_documents failure:", res)
+            callback(false, res.body)
+        end
+    end)
+    self.client:enable("AsyncHTTP", {thread = co})
+    coroutine.resume(co)
+    if UIManager.looper then UIManager:setInputTimeout() end
+    socketutil:reset_timeout()
+end
+
+function SyncNinjaClient:download_document(username, password, document,
+                                           callback)
+    self.client:reset_middlewares()
+    self.client:enable("Format.JSON")
+    self.client:enable("GinClient")
+    self.client:enable("SyncNinjaAuth",
+                       {username = username, userkey = password})
+
+    local ok, res = pcall(function()
+        return self.client:download_document({document = document})
+    end)
+    if ok then
+        callback(res.status == 200, res.body)
+    else
+        logger.dbg("SyncNinjaClient:download_document failure:", res)
+        callback(false, res.body)
+    end
+end
+
+function
SyncNinjaClient:upload_document(username, password, document, file, + callback) + self.client:reset_middlewares() + self.client:enable("Format.JSON") + self.client:enable("GinClient") + self.client:enable("SyncNinjaAuth", + {username = username, userkey = password}) + + local ok, res = pcall(function() + return self.client:upload_document({document = document, file = file}) + end) + if ok then + callback(res.status == 200, res.body) + else + logger.dbg("SyncNinjaClient:upload_document failure:", res) + callback(false, res.body) + end +end + +------------------------------------------ +----------- Existing Functions ----------- +------------------------------------------ + +function SyncNinjaClient:register(username, password) + self.client:reset_middlewares() + self.client:enable("Format.JSON") + self.client:enable("GinClient") + socketutil:set_timeout(AUTH_TIMEOUTS[1], AUTH_TIMEOUTS[2]) + local ok, res = pcall(function() + return self.client:register({username = username, password = password}) + end) + socketutil:reset_timeout() + if ok then + return res.status == 201, res.body + else + logger.dbg("SyncNinjaClient:register failure:", res) + return false, res.body + end +end + +function SyncNinjaClient:authorize(username, password) + self.client:reset_middlewares() + self.client:enable("Format.JSON") + self.client:enable("GinClient") + self.client:enable("SyncNinjaAuth", + {username = username, userkey = password}) + socketutil:set_timeout(AUTH_TIMEOUTS[1], AUTH_TIMEOUTS[2]) + local ok, res = pcall(function() return self.client:authorize() end) + socketutil:reset_timeout() + if ok then + return res.status == 200, res.body + else + logger.dbg("SyncNinjaClient:authorize failure:", res) + return false, res.body + end +end + +function SyncNinjaClient:update_progress(username, password, document, progress, + percentage, device, device_id, callback) + self.client:reset_middlewares() + self.client:enable("Format.JSON") + self.client:enable("GinClient") + self.client:enable("SyncNinjaAuth", + {username = username, userkey = password}) + + socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2]) + local co = coroutine.create(function() + local ok, res = pcall(function() + return self.client:update_progress({ + document = document, + progress = tostring(progress), + percentage = percentage, + device = device, + device_id = device_id + }) + end) + if ok then + callback(res.status == 200, res.body) + else + logger.dbg("SyncNinjaClient:update_progress failure:", res) + callback(false, res.body) + end + end) + self.client:enable("AsyncHTTP", {thread = co}) + coroutine.resume(co) + if UIManager.looper then UIManager:setInputTimeout() end + socketutil:reset_timeout() +end + +function SyncNinjaClient:get_progress(username, password, document, callback) + self.client:reset_middlewares() + self.client:enable("Format.JSON") + self.client:enable("GinClient") + self.client:enable("SyncNinjaAuth", + {username = username, userkey = password}) + + socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2]) + local co = coroutine.create(function() + local ok, res = pcall(function() + return self.client:get_progress({document = document}) + end) + if ok then + callback(res.status == 200, res.body) + else + logger.dbg("SyncNinjaClient:get_progress failure:", res) + callback(false, res.body) + end + end) + self.client:enable("AsyncHTTP", {thread = co}) + coroutine.resume(co) + if UIManager.looper then UIManager:setInputTimeout() end + socketutil:reset_timeout() +end + +return SyncNinjaClient diff --git 
a/client/syncninja.koplugin/_meta.lua b/client/syncninja.koplugin/_meta.lua
new file mode 100644
index 0000000..8ed5abf
--- /dev/null
+++ b/client/syncninja.koplugin/_meta.lua
@@ -0,0 +1,6 @@
+local _ = require("gettext")
+return {
+    name = "syncninja",
+    fullname = _("Additional sync capabilities"),
+    description = _([[Syncs your documents and activity to an altered server.]])
+}
diff --git a/client/syncninja.koplugin/api.json b/client/syncninja.koplugin/api.json
new file mode 100644
index 0000000..0ef3cc6
--- /dev/null
+++ b/client/syncninja.koplugin/api.json
@@ -0,0 +1,61 @@
+{
+    "base_url": "http://127.0.0.1:8585",
+    "name": "syncninja",
+    "methods": {
+        "add_activity": {
+            "path": "/api/ko/activity",
+            "method": "POST",
+            "required_params": ["device_id", "device", "activity"],
+            "payload": ["device_id", "device", "activity"],
+            "expected_status": [200, 401]
+        },
+        "add_documents": {
+            "path": "/api/ko/documents",
+            "method": "POST",
+            "required_params": ["documents"],
+            "payload": ["documents"],
+            "expected_status": [200, 401]
+        },
+        "check_documents": {
+            "path": "/api/ko/syncs/documents",
+            "method": "POST",
+            "required_params": ["device_id", "device", "have"],
+            "payload": ["device_id", "device", "have"],
+            "expected_status": [200, 401]
+        },
+        "check_activity": {
+            "path": "/api/ko/syncs/activity",
+            "method": "POST",
+            "required_params": ["device_id"],
+            "payload": ["device_id"],
+            "expected_status": [200, 401]
+        },
+        "download_document": {
+            "path": "/api/ko/documents/:document/file",
+            "method": "GET",
+            "required_params": ["document"],
+            "expected_status": [200, 401]
+        },
+        "upload_document": {
+            "path": "/api/ko/documents/:document/file",
+            "method": "PUT",
+            "required_params": ["document", "file"],
+            "form-data": {
+                "file": "@:file"
+            },
+            "expected_status": [200, 401]
+        },
+        "authorize": {
+            "path": "/api/ko/users/auth",
+            "method": "GET",
+            "expected_status": [200, 401]
+        },
+        "register": {
+            "path": "/api/ko/users/create",
+            "method": "POST",
+            "required_params": ["username", "password"],
+            "payload": ["username", "password"],
+            "expected_status": [201, 402]
+        }
+    }
+}
diff --git a/client/syncninja.koplugin/main.lua b/client/syncninja.koplugin/main.lua
new file mode 100644
index 0000000..dadfa1d
--- /dev/null
+++ b/client/syncninja.koplugin/main.lua
@@ -0,0 +1,953 @@
+local ConfirmBox = require("ui/widget/confirmbox")
+local DataStorage = require("datastorage")
+local Device = require("device")
+local Dispatcher = require("dispatcher")
+local DocSettings = require("docsettings")
+local InfoMessage = require("ui/widget/infomessage")
+local MultiInputDialog = require("ui/widget/multiinputdialog")
+local NetworkMgr = require("ui/network/manager")
+local ReadHistory = require("readhistory")
+local SQ3 = require("lua-ljsqlite3/init")
+local T = require("ffi/util").template
+local UIManager = require("ui/uimanager")
+local WidgetContainer = require("ui/widget/container/widgetcontainer")
+local _ = require("gettext")
+local logger = require("logger")
+local md5 = require("ffi/sha2").md5
+local random = require("random")
+
+-- TODO:
+-- - Handle ReadHistory missing files (statistics.sqlite3, bookinfo_cache.sqlite3)
+-- - Handle document uploads (Manual push only, warning saying this may take a while)
+-- - Configure activity bulk size? 1000, 5000, 10000? Separate manual settings to upload ALL?
+
+------------------------------------------
+------------ Helper Functions ------------
+------------------------------------------
'"' end + s = s .. '[' .. k .. '] = ' .. dump(v) .. ',' + end + return s .. '} ' + else + return tostring(o) + end +end + +local function validate(entry) + if not entry then return false end + if type(entry) == "string" then + if entry == "" or not entry:match("%S") then return false end + end + return true +end + +local function validateUser(user, pass) + local error_message = nil + local user_ok = validate(user) + local pass_ok = validate(pass) + if not user_ok and not pass_ok then + error_message = _("invalid username and password") + elseif not user_ok then + error_message = _("invalid username") + elseif not pass_ok then + error_message = _("invalid password") + end + + if not error_message then + return user_ok and pass_ok + else + return user_ok and pass_ok, error_message + end +end + +------------------------------------------ +-------------- Plugin Start -------------- +------------------------------------------ +local MERGE_SETTINGS_IN = "IN" +local MERGE_SETTINGS_OUT = "OUT" + +local STATISTICS_ACTIVITY_SINCE_QUERY = [[ + SELECT + b.md5 AS document, + psd.start_time AS start_time, + psd.duration AS duration, + psd.page AS current_page, + psd.total_pages + FROM page_stat_data AS psd + JOIN book AS b + ON b.id = psd.id_book + WHERE start_time > %d + ORDER BY start_time ASC LIMIT 1000; +]] + +local STATISTICS_BOOK_QUERY = [[ + SELECT + md5, + title, + authors, + series, + language + FROM book; +]] + +local BOOKINFO_BOOK_QUERY = [[ + SELECT + (directory || filename) as filepath, + title, + authors, + series, + series_index, + language, + description + FROM bookinfo; +]] + +-- Validate Device ID Exists +if G_reader_settings:hasNot("device_id") then + G_reader_settings:saveSetting("device_id", random.uuid()) +end + +-- Define DB Location +local statistics_db = DataStorage:getSettingsDir() .. "/statistics.sqlite3" +local bookinfo_db = DataStorage:getSettingsDir() .. "/bookinfo_cache.sqlite3" + +local SyncNinja = WidgetContainer:extend{ + name = "syncninja", + settings = nil, + is_doc_only = false +} + +SyncNinja.default_settings = { + server = nil, + username = nil, + password = nil, + sync_frequency = 30, + sync_activity = true, + sync_documents = true, + sync_document_files = true +} + +function SyncNinja:init() + logger.dbg("SyncNinja: init") + + -- Instance Specific (Non Interactive) + self.periodic_push_task = function() self:performSync(false) end + + -- Load Settings + self.device_id = G_reader_settings:readSetting("device_id") + self.settings = G_reader_settings:readSetting("syncninja", + self.default_settings) + + -- Register Menu Items + self.ui.menu:registerToMainMenu(self) + + -- Initial Periodic Push Schedule (5 Minutes) + self:schedulePeriodicPush(5) +end + +------------------------------------------ +-------------- UX Functions -------------- +------------------------------------------ +function SyncNinja:addToMainMenu(menu_items) + logger.dbg("SyncNinja: addToMainMenu") + menu_items.syncninja = { + text = _("Sync Ninja"), + sorting_hint = "tools", + sub_item_table = { + { + text = _("Sync Server"), + keep_menu_open = true, + tap_input_func = function(menu) + return { + title = _("Sync server address"), + input = self.settings.server or "https://", + type = "text", + callback = function(input) + self.settings.server = input ~= "" and input or nil + if menu then + menu:updateItems() + end + end + } + end + }, { + text_func = function() + return self.settings.password and (_("Logout")) or + _("Register") .. " / " .. 
_("Login") + end, + enabled_func = function() + return self.settings.server ~= nil + end, + keep_menu_open = true, + callback_func = function() + if self.settings.password then + return function(menu) + self:logoutUI(menu) + end + else + return function(menu) + self:loginUI(menu) + end + end + end + }, { + text = _("Manual Sync"), + keep_menu_open = true, + enabled_func = function() + return self.settings.password ~= nil and + self.settings.username ~= nil and + self.settings.server ~= nil + end, + callback = function() + UIManager:unschedule(self.performSync) + self:performSync(true) -- Interactive + end + }, { + text = _("KOSync Auth Merge"), + sub_item_table = { + { + text = _("KOSync Merge In"), + keep_menu_open = true, + callback = function() + self:mergeKOSync(MERGE_SETTINGS_IN) + end + }, { + text = _("KOSync Merge Out"), + keep_menu_open = true, + callback = function() + self:mergeKOSync(MERGE_SETTINGS_OUT) + end + } + + }, + separator = true + }, { + text_func = function() + return T(_("Sync Frequency (%1 Minutes)"), + self.settings.sync_frequency or 30) + end, + keep_menu_open = true, + callback = function(touchmenu_instance) + local SpinWidget = require("ui/widget/spinwidget") + local items = SpinWidget:new{ + text = _( + [[This value determines the cadence at which the syncs will be performed. +If set to 0, periodic sync will be disabled.]]), + value = self.settings.sync_frequency or 30, + value_min = 0, + value_max = 1440, + value_step = 30, + value_hold_step = 60, + ok_text = _("Set"), + title_text = _("Minutes between syncs"), + default_value = 30, + callback = function(spin) + self.settings.sync_frequency = spin.value > 0 and + spin.value or 30 + if touchmenu_instance then + touchmenu_instance:updateItems() + end + self:schedulePeriodicPush() + end + } + UIManager:show(items) + end + }, { + text_func = function() + return T(_("Sync Activity (%1)"), self.settings + .sync_activity == true and (_("Enabled")) or + (_("Disabled"))) + end, + sub_item_table = { + { + text = _("Enabled"), + checked_func = function() + return self.settings.sync_activity == true + end, + callback = function() + self.settings.sync_activity = true + end + }, { + text = _("Disabled"), + checked_func = function() + return self.settings.sync_activity ~= true + end, + callback = function() + self.settings.sync_activity = false + end + } + } + }, { + text_func = function() + return T(_("Sync Documents (%1)"), self.settings + .sync_documents == true and (_("Enabled")) or + (_("Disabled"))) + end, + sub_item_table = { + { + text = _("Enabled"), + checked_func = function() + return self.settings.sync_documents == true + end, + callback = function() + self.settings.sync_documents = true + end + }, { + text = _("Disabled"), + checked_func = function() + return self.settings.sync_documents ~= true + end, + callback = function() + self.settings.sync_documents = false + end + } + } + }, { + text_func = function() + return T(_("Sync Document Files (%1)"), + self.settings.sync_documents == true and + self.settings.sync_document_files == true and + (_("Enabled")) or (_("Disabled"))) + end, + enabled_func = function() + return self.settings.sync_documents == true + end, + sub_item_table = { + { + text = _("Enabled"), + checked_func = function() + return self.settings.sync_document_files == true + end, + callback = function() + self.settings.sync_document_files = true + end + }, { + text = _("Disabled"), + checked_func = function() + return self.settings.sync_document_files ~= true + end, + callback = function() + 
self.settings.sync_document_files = false + end + } + } + } + } + } +end + +function SyncNinja:loginUI(menu) + logger.dbg("SyncNinja: loginUI") + if NetworkMgr:willRerunWhenOnline(function() self:loginUI(menu) end) then + return + end + + local dialog + dialog = MultiInputDialog:new{ + title = _("Register/login to SyncNinja server"), + fields = { + {text = self.settings.username, hint = "username"}, + {hint = "password", text_type = "password"} + }, + buttons = { + { + { + text = _("Cancel"), + id = "close", + callback = function() + UIManager:close(dialog) + end + }, { + text = _("Login"), + callback = function() + local username, password = unpack(dialog:getFields()) + local ok, err = validateUser(username, password) + if not ok then + UIManager:show(InfoMessage:new{ + text = T(_("Cannot login: %1"), err), + timeout = 2 + }) + else + UIManager:close(dialog) + UIManager:scheduleIn(0.5, function() + self:userLogin(username, password, menu) + end) + UIManager:show(InfoMessage:new{ + text = _("Logging in. Please wait…"), + timeout = 1 + }) + end + end + }, { + text = _("Register"), + callback = function() + local username, password = unpack(dialog:getFields()) + local ok, err = validateUser(username, password) + if not ok then + UIManager:show(InfoMessage:new{ + text = T(_("Cannot register: %1"), err), + timeout = 2 + }) + else + UIManager:close(dialog) + UIManager:scheduleIn(0.5, function() + self:userRegister(username, password, menu) + end) + UIManager:show(InfoMessage:new{ + text = _("Registering. Please wait…"), + timeout = 1 + }) + end + end + } + } + } + } + UIManager:show(dialog) + dialog:onShowKeyboard() +end + +function SyncNinja:logoutUI(menu) + logger.dbg("SyncNinja: logoutUI") + self.settings.username = nil + self.settings.password = nil + if menu then menu:updateItems() end + UIManager:unschedule(self.periodic_push_task) +end + +function SyncNinja:mergeKOSync(direction) + logger.dbg("SyncNinja: mergeKOSync") + local kosync_settings = G_reader_settings:readSetting("kosync") + if kosync_settings == nil then return end + + if direction == MERGE_SETTINGS_OUT then + -- Validate Configured + if not self.settings.server or not self.settings.username or + not self.settings.password then + return UIManager:show(InfoMessage:new{ + text = _("Error: SyncNinja not configured") + }) + end + + kosync_settings.custom_server = self.settings.server .. 
+ (self.settings.server:sub(-#"/") == + "/" and "api/ko" or "/api/ko") + kosync_settings.username = self.settings.username + kosync_settings.userkey = self.settings.password + + UIManager:show(InfoMessage:new{text = _("Synced to KOSync")}) + elseif direction == MERGE_SETTINGS_IN then + -- Validate Configured + if not kosync_settings.custom_server or not kosync_settings.username or + not kosync_settings.userkey then + return UIManager:show(InfoMessage:new{ + text = _("Error: KOSync not configured") + }) + end + + -- Validate Compatible Server + if kosync_settings.custom_server:sub(-#"/api/ko") ~= "/api/ko" and + kosync_settings.custom_server:sub(-#"/api/ko/") ~= "/api/ko/" then + return UIManager:show(InfoMessage:new{ + text = _("Error: Configured KOSync server not compatible") + }) + end + + self.settings.server = string.gsub(kosync_settings.custom_server, + "/api/ko/?$", "") + self.settings.username = kosync_settings.username + self.settings.password = kosync_settings.userkey + + UIManager:show(InfoMessage:new{text = _("Synced from KOSync")}) + end +end + +------------------------------------------ +------------- Login Functions ------------ +------------------------------------------ +function SyncNinja:userLogin(username, password, menu) + logger.dbg("SyncNinja: userLogin") + if not self.settings.server then return end + + local SyncNinjaClient = require("SyncNinjaClient") + local client = SyncNinjaClient:new{ + custom_url = self.settings.server, + service_spec = self.path .. "/api.json" + } + Device:setIgnoreInput(true) + local userkey = md5(password) + local ok, status, body = pcall(client.authorize, client, username, userkey) + if not ok then + if status then + UIManager:show(InfoMessage:new{ + text = _("An error occurred while logging in:") .. "\n" .. + status + }) + else + UIManager:show(InfoMessage:new{ + text = _("An unknown error occurred while logging in.") + }) + end + Device:setIgnoreInput(false) + return + elseif status then + self.settings.username = username + self.settings.password = userkey + if menu then menu:updateItems() end + UIManager:show(InfoMessage:new{ + text = _("Logged in to KOReader server.") + }) + + self:schedulePeriodicPush(0) + else + logger.dbg("SyncNinja: userLogin Error:", dump(body)) + end + Device:setIgnoreInput(false) +end + +function SyncNinja:userRegister(username, password, menu) + logger.dbg("SyncNinja: userRegister") + if not self.settings.server then return end + + local SyncNinjaClient = require("SyncNinjaClient") + local client = SyncNinjaClient:new{ + custom_url = self.settings.server, + service_spec = self.path .. "/api.json" + } + -- on Android to avoid ANR (no-op on other platforms) + Device:setIgnoreInput(true) + local userkey = md5(password) + local ok, status, body = pcall(client.register, client, username, userkey) + if not ok then + if status then + UIManager:show(InfoMessage:new{ + text = _("An error occurred while registering:") .. "\n" .. 
+ status + }) + else + UIManager:show(InfoMessage:new{ + text = _("An unknown error occurred while registering.") + }) + end + elseif status then + self.settings.username = username + self.settings.password = userkey + if menu then menu:updateItems() end + UIManager:show(InfoMessage:new{ + text = _("Registered to KOReader server.") + }) + + self:schedulePeriodicPush(0) + else + UIManager:show(InfoMessage:new{ + text = body and body.message or _("Unknown server error") + }) + end + Device:setIgnoreInput(false) +end + +------------------------------------------ +------------- Sync Functions ------------- +------------------------------------------ +function SyncNinja:schedulePeriodicPush(minutes) + logger.dbg("SyncNinja: schedulePeriodicPush") + + -- Validate Configured + if not self.settings then return end + if not self.settings.username then return end + if not self.settings.password then return end + if not self.settings.server then return end + + -- Unschedule & Schedule + local sync_frequency = minutes or self.settings.sync_frequency or 30 + UIManager:unschedule(self.periodic_push_task) + UIManager:scheduleIn(60 * sync_frequency, self.periodic_push_task) +end + +function SyncNinja:performSync(interactive) + logger.dbg("SyncNinja: performSync") + + -- Upload Activity & Check Documents + self:checkActivity(interactive) + self:checkDocuments(interactive) + + if interactive == true then + UIManager:show(InfoMessage:new{ + text = _("SyncNinja: Manual Sync Success"), + timeout = 3 + }) + end + + -- Schedule Push Again + self:schedulePeriodicPush() +end + +function SyncNinja:checkActivity(interactive) + logger.dbg("SyncNinja: checkActivity") + + -- Ensure Activity Sync Enabled + if self.settings.sync_activity ~= true then return end + + -- API Callback Function + local callback_func = function(ok, body) + if not ok then + -- TODO: if interactive + UIManager:show(InfoMessage:new{ + text = _("SyncNinja: checkActivity Error"), + timeout = 3 + }) + return logger.dbg("SyncNinja: checkActivity Error:", dump(body)) + end + + local last_sync = body.last_sync + local activity_data = self:getStatisticsActivity(last_sync) + + -- Activity Data Exists + if not (next(activity_data) == nil) then + self:uploadActivity(activity_data, interactive) + end + end + + -- API Call + local SyncNinjaClient = require("SyncNinjaClient") + local client = SyncNinjaClient:new{ + custom_url = self.settings.server, + service_spec = self.path .. "/api.json" + } + local ok, err = pcall(client.check_activity, client, self.settings.username, + self.settings.password, self.device_id, callback_func) +end + +function SyncNinja:uploadActivity(activity_data, interactive) + logger.dbg("SyncNinja: uploadActivity") + + -- API Callback Function + local callback_func = function(ok, body) + if not ok then + -- TODO: if interactive + UIManager:show(InfoMessage:new{ + text = _("SyncNinja: uploadActivity Error"), + timeout = 3 + }) + + return logger.dbg("SyncNinja: uploadActivity Error:", dump(body)) + end + end + + -- API Call + local SyncNinjaClient = require("SyncNinjaClient") + local client = SyncNinjaClient:new{ + custom_url = self.settings.server, + service_spec = self.path .. 
"/api.json" + } + local ok, err = pcall(client.add_activity, client, self.settings.username, + self.settings.password, self.device_id, Device.model, + activity_data, callback_func) +end + +function SyncNinja:checkDocuments(interactive) + logger.dbg("SyncNinja: checkDocuments") + + -- ensure document sync enabled + if self.settings.sync_documents ~= true then return end + + -- API Request Data + local doc_metadata = self:getLocalDocumentMetadata() + local doc_ids = self:getLocalDocumentIDs(doc_metadata) + + -- API Callback Function + local callback_func = function(ok, body) + if not ok then + -- TODO: if interactive + UIManager:show(InfoMessage:new{ + text = _("SyncNinja: checkDocuments Error"), + timeout = 3 + }) + return logger.dbg("SyncNinja: checkDocuments Error:", dump(body)) + end + + -- Documents Wanted + if not (next(body.want) == nil) then + local hash_want = {} + for _, v in pairs(body.want) do hash_want[v] = true end + + local upload_doc_metadata = {} + for _, v in pairs(doc_metadata) do + if hash_want[v.id] == true then + table.insert(upload_doc_metadata, v) + end + end + + self:uploadDocuments(upload_doc_metadata, interactive) + end + + -- Documents Provided + if not (next(body.give) == nil) then + self:downloadDocuments(body.give, interactive) + end + end + + -- API Call + local SyncNinjaClient = require("SyncNinjaClient") + local client = SyncNinjaClient:new{ + custom_url = self.settings.server, + service_spec = self.path .. "/api.json" + } + local ok, err = pcall(client.check_documents, client, + self.settings.username, self.settings.password, + self.device_id, Device.model, doc_ids, callback_func) +end + +function SyncNinja:downloadDocuments(doc_metadata, interactive) + logger.dbg("SyncNinja: downloadDocuments") + + -- TODO +end + +function SyncNinja:uploadDocuments(doc_metadata, interactive) + logger.dbg("SyncNinja: uploadDocuments") + + -- Ensure Document Sync Enabled + if self.settings.sync_documents ~= true then return end + + -- API Callback Function + local callback_func = function(ok, body) + if not ok then + -- TODO: if interactive + UIManager:show(InfoMessage:new{ + text = _("SyncNinja: uploadDocuments Error"), + timeout = 3 + }) + return logger.dbg("SyncNinja: uploadDocuments Error:", dump(body)) + end + end + + -- API Client + local SyncNinjaClient = require("SyncNinjaClient") + local client = SyncNinjaClient:new{ + custom_url = self.settings.server, + service_spec = self.path .. "/api.json" + } + + -- API Initial Metadata + local ok, err = pcall(client.add_documents, client, self.settings.username, + self.settings.password, doc_metadata, callback_func) + + -- Ensure Document File Sync Enabled + if self.settings.sync_document_files ~= true then return end + if interactive ~= true then return end + + -- API File Upload + local confirm_upload_callback = function() + for _, v in pairs(doc_metadata) do + if v.filepath ~= nil then + local ok, err = pcall(client.upload_document, client, + self.settings.username, + self.settings.password, v.id, v.filepath, + callback_func) + end + end + end + + UIManager:show(ConfirmBox:new{ + text = _("Upload documents? 
This can take a while."),
+        ok_text = _("Yes"),
+        ok_callback = confirm_upload_callback
+    })
+end
+
+------------------------------------------
+------------ Getter Functions ------------
+------------------------------------------
+function SyncNinja:getLocalDocumentIDs(doc_metadata)
+    logger.dbg("SyncNinja: getLocalDocumentIDs")
+
+    local document_ids = {}
+
+    if doc_metadata == nil then
+        doc_metadata = self:getLocalDocumentMetadata()
+    end
+
+    for _, v in pairs(doc_metadata) do table.insert(document_ids, v.id) end
+
+    return document_ids
+end
+
+function SyncNinja:getLocalDocumentMetadata()
+    logger.dbg("SyncNinja: getLocalDocumentMetadata")
+
+    local all_documents = {}
+
+    local documents_kv = self:getStatisticsBookKV()
+    local bookinfo_books = self:getBookInfoBookKV()
+
+    for _, v in pairs(ReadHistory.hist) do
+        if DocSettings:hasSidecarFile(v.file) then
+            local docsettings = DocSettings:open(v.file)
+
+            -- Ensure Partial MD5 Exists
+            local pmd5 = docsettings:readSetting("partial_md5_checksum")
+            if not pmd5 then
+                pmd5 = self:getPartialMd5(v.file)
+                docsettings:saveSetting("partial_md5_checksum", pmd5)
+            end
+
+            -- Get Document Props
+            local doc_props = docsettings:readSetting("doc_props")
+            local fdoc = bookinfo_books[v.file] or {}
+
+            -- Update or Create
+            if documents_kv[pmd5] ~= nil then
+                local doc = documents_kv[pmd5]
+
+                -- Merge Statistics, History, and BookInfo
+                doc.title = doc.title or doc_props.title or fdoc.title
+                doc.author = doc.author or doc_props.authors or fdoc.author
+                doc.series = doc.series or doc_props.series or fdoc.series
+                doc.lang = doc.lang or doc_props.language or fdoc.lang
+
+                -- Merge History and BookInfo
+                doc.series_index = doc_props.series_index or fdoc.series_index
+                doc.description = doc_props.description or fdoc.description
+                doc.filepath = v.file
+            else
+                -- Merge History and BookInfo
+                documents_kv[pmd5] = {
+                    title = doc_props.title or fdoc.title,
+                    author = doc_props.authors or fdoc.author,
+                    series = doc_props.series or fdoc.series,
+                    series_index = doc_props.series_index or fdoc.series_index,
+                    lang = doc_props.language or fdoc.lang,
+                    description = doc_props.description or fdoc.description,
+                    filepath = v.file
+                }
+            end
+        end
+    end
+
+    -- Convert KV -> Array
+    for pmd5, v in pairs(documents_kv) do
+        table.insert(all_documents, {
+            id = pmd5,
+            title = v.title,
+            author = v.author,
+            series = v.series,
+            series_index = v.series_index,
+            lang = v.lang,
+            description = v.description,
+            filepath = v.filepath
+        })
+    end
+
+    return all_documents
+end
+
+function SyncNinja:getStatisticsActivity(timestamp)
+    logger.dbg("SyncNinja: getStatisticsActivity")
+
+    local all_data = {}
+    local conn = SQ3.open(statistics_db)
+    local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY,
+                                            timestamp))
+    local rows = stmt:resultset("i", 1000)
+    conn:close()
+
+    -- No Results
+    if rows == nil then return all_data end
+
+    -- Normalize
+    for i, v in pairs(rows[1]) do
+        table.insert(all_data, {
+            document = rows[1][i],
+            start_time = tonumber(rows[2][i]),
+            duration = tonumber(rows[3][i]),
+            current_page = tonumber(rows[4][i]),
+            total_pages = tonumber(rows[5][i])
+        })
+    end
+
+    return all_data
+end
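+
+-- Each normalized activity row has the following shape (values invented
+-- for illustration); this is the payload add_activity() uploads in batches
+-- of up to 1000 rows:
+--
+--   {
+--       document = "8f14e45fceea167a",  -- partial MD5 of the book
+--       start_time = 1695084000,        -- unix timestamp
+--       duration = 95,                  -- seconds spent on the page
+--       current_page = 42,
+--       total_pages = 310,
+--   }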
+
+-- Returns KEY:VAL (MD5: book metadata)
+function SyncNinja:getStatisticsBookKV()
+    logger.dbg("SyncNinja: getStatisticsBookKV")
+
+    local all_data = {}
+    local conn = SQ3.open(statistics_db)
+    local stmt = conn:prepare(STATISTICS_BOOK_QUERY)
+    local rows = stmt:resultset("i", 1000)
+    conn:close()
+
+    -- No Results
+    if rows == nil then return all_data end
+
+    -- Normalize
+    for i, v in pairs(rows[1]) do
+        local pmd5 = rows[1][i]
+        all_data[pmd5] = {
+            title = rows[2][i],
+            author = rows[3][i],
+            series = rows[4][i],
+            lang = rows[5][i]
+        }
+    end
+
+    return all_data
+end
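+
+-- Note: document metadata is merged from two separate KOReader caches:
+-- statistics.sqlite3 (keyed by partial MD5, above) and
+-- bookinfo_cache.sqlite3 (keyed by filepath, below). The two are joined in
+-- getLocalDocumentMetadata() via each history entry's partial_md5_checksum
+-- sidecar setting.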
+
+-- Returns KEY:VAL (FILEPATH: book metadata)
+function SyncNinja:getBookInfoBookKV()
+    logger.dbg("SyncNinja: getBookInfoBookKV")
+
+    local all_data = {}
+    local conn = SQ3.open(bookinfo_db)
+    local stmt = conn:prepare(BOOKINFO_BOOK_QUERY)
+    local rows = stmt:resultset("i", 1000)
+    conn:close()
+
+    -- No Results
+    if rows == nil then return all_data end
+
+    -- Normalize
+    for i, v in pairs(rows[1]) do
+        local filepath = rows[1][i]
+        all_data[filepath] = {
+            title = rows[2][i],
+            author = rows[3][i],
+            series = rows[4][i],
+            series_index = tonumber(rows[5][i]),
+            lang = rows[6][i],
+            description = rows[7][i]
+        }
+    end
+
+    return all_data
+end
+
+function SyncNinja:getPartialMd5(file)
+    logger.dbg("SyncNinja: getPartialMd5")
+
+    if file == nil then return nil end
+    local bit = require("bit")
+    local lshift = bit.lshift
+    local step, size = 1024, 1024
+    local update = md5()
+    local file_handle = io.open(file, 'rb')
+    if file_handle == nil then return nil end
+    for i = -1, 10 do
+        file_handle:seek("set", lshift(step, 2 * i))
+        local sample = file_handle:read(size)
+        if sample then
+            update(sample)
+        else
+            break
+        end
+    end
+    file_handle:close()
+    return update()
+end
+
+return SyncNinja
diff --git a/cmd/main.go b/cmd/main.go
new file mode 100644
index 0000000..b529da2
--- /dev/null
+++ b/cmd/main.go
@@ -0,0 +1,55 @@
+package main
+
+import (
+	"os"
+	"os/signal"
+
+	log "github.com/sirupsen/logrus"
+	"github.com/urfave/cli/v2"
+	"reichard.io/bbank/server"
+)
+
+type UTCFormatter struct {
+	log.Formatter
+}
+
+func (u UTCFormatter) Format(e *log.Entry) ([]byte, error) {
+	e.Time = e.Time.UTC()
+	return u.Formatter.Format(e)
+}
+
+func main() {
+	log.SetFormatter(UTCFormatter{&log.TextFormatter{FullTimestamp: true}})
+
+	app := &cli.App{
+		Name:  "Book Bank",
+		Usage: "A self hosted e-book progress tracker.",
+		Commands: []*cli.Command{
+			{
+				Name:    "serve",
+				Aliases: []string{"s"},
+				Usage:   "Start Book Bank web server.",
+				Action:  cmdServer,
+			},
+		},
+	}
+	err := app.Run(os.Args)
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+
+func cmdServer(ctx *cli.Context) error {
+	log.Info("Starting Book Bank Server")
+	srv := server.NewServer()
+	srv.StartServer()
+
+	// Block until interrupted, then shut down cleanly
+	c := make(chan os.Signal, 1)
+	signal.Notify(c, os.Interrupt)
+	<-c
+
+	srv.StopServer()
+	return nil
+}
diff --git a/config/config.go b/config/config.go
new file mode 100644
index 0000000..9766dbc
--- /dev/null
+++ b/config/config.go
@@ -0,0 +1,34 @@
+package config
+
+import (
+	"os"
+)
+
+type Config struct {
+	DBType     string
+	DBName     string
+	DBPassword string
+	ConfigPath string
+	DataPath   string
+	ListenPort string
+	Version    string
+}
+
+func Load() *Config {
+	return &Config{
+		DBType:     getEnv("DATABASE_TYPE", "SQLite"),
+		DBName:     getEnv("DATABASE_NAME", "bbank"),
+		DBPassword: getEnv("DATABASE_PASSWORD", ""),
+		ConfigPath: getEnv("CONFIG_PATH", "/config"),
+		DataPath:   getEnv("DATA_PATH", "/data"),
+		ListenPort: getEnv("LISTEN_PORT", "8585"),
+		Version:    "0.0.1",
+	}
+}
+
+func getEnv(key, fallback string) string {
+	if value, ok := os.LookupEnv(key); ok {
+		return value
+	}
+	return fallback
+}
diff --git a/database/db.go b/database/db.go
new file mode 100644
index 0000000..37d600d
--- /dev/null
+++ b/database/db.go
@@ -0,0 +1,31 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions: +// sqlc v1.21.0 + +package database + +import ( + "context" + "database/sql" +) + +type DBTX interface { + ExecContext(context.Context, string, ...interface{}) (sql.Result, error) + PrepareContext(context.Context, string) (*sql.Stmt, error) + QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error) + QueryRowContext(context.Context, string, ...interface{}) *sql.Row +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx *sql.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/database/manager.go b/database/manager.go new file mode 100644 index 0000000..39bd6ef --- /dev/null +++ b/database/manager.go @@ -0,0 +1,66 @@ +package database + +import ( + "context" + "database/sql" + _ "embed" + "path" + + sqlite "github.com/mattn/go-sqlite3" + log "github.com/sirupsen/logrus" + "reichard.io/bbank/config" +) + +type DBManager struct { + DB *sql.DB + Ctx context.Context + Queries *Queries +} + +//go:embed schema.sql +var ddl string + +func foobar() string { + log.Info("WTF") + return "" +} + +func NewMgr(c *config.Config) *DBManager { + // Create Manager + dbm := &DBManager{ + Ctx: context.Background(), + } + + // Create Database + if c.DBType == "SQLite" { + + sql.Register("sqlite3_custom", &sqlite.SQLiteDriver{ + ConnectHook: func(conn *sqlite.SQLiteConn) error { + if err := conn.RegisterFunc("test_func", foobar, false); err != nil { + log.Info("Error Registering") + return err + } + return nil + }, + }) + + dbLocation := path.Join(c.ConfigPath, "bbank.db") + + var err error + dbm.DB, err = sql.Open("sqlite3_custom", dbLocation) + if err != nil { + log.Fatal(err) + } + } else { + log.Fatal("Unsupported Database") + } + + // Create Tables + if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil { + log.Fatal(err) + } + + dbm.Queries = New(dbm.DB) + + return dbm +} diff --git a/database/models.go b/database/models.go new file mode 100644 index 0000000..d641ff4 --- /dev/null +++ b/database/models.go @@ -0,0 +1,79 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.21.0 + +package database + +import ( + "time" +) + +type Activity struct { + ID int64 `json:"id"` + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + StartTime time.Time `json:"start_time"` + Duration int64 `json:"duration"` + CurrentPage int64 `json:"current_page"` + TotalPages int64 `json:"total_pages"` + CreatedAt time.Time `json:"created_at"` +} + +type Device struct { + ID string `json:"id"` + UserID string `json:"user_id"` + DeviceName string `json:"device_name"` + CreatedAt time.Time `json:"created_at"` + Sync bool `json:"sync"` +} + +type Document struct { + ID string `json:"id"` + Md5 *string `json:"md5"` + Filepath *string `json:"filepath"` + Title *string `json:"title"` + Author *string `json:"author"` + Series *string `json:"series"` + SeriesIndex *int64 `json:"series_index"` + Lang *string `json:"lang"` + Description *string `json:"description"` + Olid *string `json:"-"` + Synced bool `json:"-"` + Deleted bool `json:"-"` + UpdatedAt time.Time `json:"updated_at"` + CreatedAt time.Time `json:"created_at"` +} + +type DocumentDeviceSync struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + LastSynced time.Time `json:"last_synced"` + Sync bool `json:"sync"` +} + +type DocumentProgress struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + Percentage float64 `json:"percentage"` + Progress string `json:"progress"` + CreatedAt time.Time `json:"created_at"` +} + +type RescaledActivity struct { + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + UserID string `json:"user_id"` + StartTime time.Time `json:"start_time"` + Page int64 `json:"page"` + Duration int64 `json:"duration"` +} + +type User struct { + ID string `json:"id"` + Pass string `json:"-"` + Admin bool `json:"-"` + CreatedAt time.Time `json:"created_at"` +} diff --git a/database/query.sql b/database/query.sql new file mode 100644 index 0000000..99fa14c --- /dev/null +++ b/database/query.sql @@ -0,0 +1,427 @@ +-- name: CreateUser :execrows +INSERT INTO users (id, pass) +VALUES (?, ?) +ON CONFLICT DO NOTHING; + +-- name: GetUser :one +SELECT * FROM users +WHERE id = $user_id LIMIT 1; + +-- name: UpsertDocument :one +INSERT INTO documents ( + id, + md5, + filepath, + title, + author, + series, + series_index, + lang, + description, + olid +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +ON CONFLICT DO UPDATE +SET + md5 = COALESCE(excluded.md5, md5), + filepath = COALESCE(excluded.filepath, filepath), + title = COALESCE(excluded.title, title), + author = COALESCE(excluded.author, author), + series = COALESCE(excluded.series, series), + series_index = COALESCE(excluded.series_index, series_index), + lang = COALESCE(excluded.lang, lang), + description = COALESCE(excluded.description, description), + olid = COALESCE(excluded.olid, olid) +RETURNING *; + +-- name: DeleteDocument :execrows +UPDATE documents +SET + deleted = 1 +WHERE id = $id; + +-- name: UpdateDocumentSync :one +UPDATE documents +SET + synced = $synced +WHERE id = $id +RETURNING *; + +-- name: UpdateDocumentDeleted :one +UPDATE documents +SET + deleted = $deleted +WHERE id = $id +RETURNING *; + +-- name: GetDocument :one +SELECT * FROM documents +WHERE id = $document_id LIMIT 1; + +-- name: UpsertDevice :one +INSERT INTO devices (id, user_id, device_name) +VALUES (?, ?, ?) 
+ON CONFLICT DO UPDATE +SET + device_name = COALESCE(excluded.device_name, device_name) +RETURNING *; + +-- name: GetDevice :one +SELECT * FROM devices +WHERE id = $device_id LIMIT 1; + +-- name: UpdateProgress :one +INSERT OR REPLACE INTO document_progress ( + user_id, + document_id, + device_id, + percentage, + progress +) +VALUES (?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetProgress :one +SELECT + document_progress.*, + devices.device_name +FROM document_progress +JOIN devices ON document_progress.device_id = devices.id +WHERE + document_progress.user_id = $user_id + AND document_progress.document_id = $document_id +ORDER BY + document_progress.created_at + DESC +LIMIT 1; + +-- name: GetLastActivity :one +SELECT start_time +FROM activity +WHERE device_id = $device_id +AND user_id = $user_id +ORDER BY start_time DESC LIMIT 1; + +-- name: AddActivity :one +INSERT INTO activity ( + user_id, + document_id, + device_id, + start_time, + duration, + current_page, + total_pages +) +VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING *; + +-- name: GetMissingDocuments :many +SELECT documents.* FROM documents +WHERE + documents.filepath IS NOT NULL + AND documents.deleted = false + AND documents.id NOT IN (sqlc.slice('document_ids')); + +-- name: GetWantedDocuments :many +SELECT CAST(value AS TEXT) AS id +FROM json_each(?1) +LEFT JOIN documents +ON value = documents.id +WHERE ( + documents.id IS NOT NULL + AND documents.synced = false +) +OR (documents.id IS NULL) +OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT); + +-- name: GetDeletedDocuments :many +SELECT documents.id +FROM documents +WHERE + documents.deleted = true + AND documents.id IN (sqlc.slice('document_ids')); + +-- name: GetDocuments :many +SELECT * FROM documents +ORDER BY created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetDocumentsWithStats :many +WITH true_progress AS ( + SELECT + start_time AS last_read, + SUM(duration) / 60 AS total_time_minutes, + document_id, + current_page, + total_pages, + ROUND(CAST(current_page AS REAL) / CAST(total_pages AS REAL) * 100, 2) AS percentage + FROM activity + WHERE user_id = $user_id + GROUP BY document_id + HAVING MAX(start_time) +) +SELECT + documents.*, + + CAST(IFNULL(current_page, 0) AS INTEGER) AS current_page, + CAST(IFNULL(total_pages, 0) AS INTEGER) AS total_pages, + CAST(IFNULL(total_time_minutes, 0) AS INTEGER) AS total_time_minutes, + + CAST( + STRFTIME('%Y-%m-%dT%H:%M:%SZ', IFNULL(last_read, "1970-01-01") + ) AS TEXT) AS last_read, + + CAST(CASE + WHEN percentage > 97.0 THEN 100.0 + WHEN percentage IS NULL THEN 0.0 + ELSE percentage + END AS REAL) AS percentage + +FROM documents +LEFT JOIN true_progress ON document_id = id +ORDER BY last_read DESC, created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetUsers :many +SELECT * FROM users +WHERE + users.id = $user + OR ?1 IN ( + SELECT id + FROM users + WHERE id = $user + AND admin = 1 + ) +ORDER BY created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetActivity :many +SELECT * FROM activity +WHERE + user_id = $user_id + AND ( + ($doc_filter = TRUE AND document_id = $document_id) + OR $doc_filter = FALSE + ) +ORDER BY start_time DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetDevices :many +SELECT * FROM devices +WHERE user_id = $user_id +ORDER BY created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetDocumentReadStats :one +SELECT + count(DISTINCT page) AS pages_read, + sum(duration) AS total_time +FROM rescaled_activity +WHERE document_id = $document_id +AND user_id = $user_id +AND start_time >= 
$start_time; + +-- name: GetDocumentReadStatsCapped :one +WITH capped_stats AS ( + SELECT min(sum(duration), CAST($page_duration_cap AS INTEGER)) AS durations + FROM rescaled_activity + WHERE document_id = $document_id + AND user_id = $user_id + AND start_time >= $start_time + GROUP BY page +) +SELECT + CAST(count(*) AS INTEGER) AS pages_read, + CAST(sum(durations) AS INTEGER) AS total_time +FROM capped_stats; + +-- name: GetDocumentDaysRead :one +WITH document_days AS ( + SELECT date(start_time, 'localtime') AS dates + FROM rescaled_activity + WHERE document_id = $document_id + AND user_id = $user_id + GROUP BY dates +) +SELECT CAST(count(*) AS INTEGER) AS days_read +FROM document_days; + +-- name: GetUserDayStreaks :one +WITH document_days AS ( + SELECT date(start_time, 'localtime') AS read_day + FROM activity + WHERE user_id = $user_id + GROUP BY read_day + ORDER BY read_day DESC +), +partitions AS ( + SELECT + document_days.*, + row_number() OVER ( + PARTITION BY 1 ORDER BY read_day DESC + ) AS seqnum + FROM document_days +), +streaks AS ( + SELECT + count(*) AS streak, + MIN(read_day) AS start_date, + MAX(read_day) AS end_date + FROM partitions + GROUP BY date(read_day, '+' || seqnum || ' day') + ORDER BY end_date DESC +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date + FROM streaks +) +SELECT + CAST(max_streak AS INTEGER), + CAST(max_streak_start_date AS TEXT), + CAST(max_streak_end_date AS TEXT), + streak AS current_streak, + CAST(start_date AS TEXT) AS current_streak_start_date, + CAST(end_date AS TEXT) AS current_streak_end_date +FROM max_streak, streaks LIMIT 1; + +-- name: GetUserWeekStreaks :one +WITH document_weeks AS ( + SELECT STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') AS read_week + FROM activity + WHERE user_id = $user_id + GROUP BY read_week + ORDER BY read_week DESC +), +partitions AS ( + SELECT + document_weeks.*, + row_number() OVER ( + PARTITION BY 1 ORDER BY read_week DESC + ) AS seqnum + FROM document_weeks +), +streaks AS ( + SELECT + count(*) AS streak, + MIN(read_week) AS start_date, + MAX(read_week) AS end_date + FROM partitions + GROUP BY date(read_week, '+' || (seqnum * 7) || ' day') + ORDER BY end_date DESC +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date + FROM streaks +) +SELECT + CAST(max_streak AS INTEGER), + CAST(max_streak_start_date AS TEXT), + CAST(max_streak_end_date AS TEXT), + streak AS current_streak, + CAST(start_date AS TEXT) AS current_streak_start_date, + CAST(end_date AS TEXT) AS current_streak_end_date +FROM max_streak, streaks LIMIT 1; + +-- name: GetUserWindowStreaks :one +WITH document_windows AS ( + SELECT CASE + WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') + WHEN ?2 = "DAY" THEN date(start_time, 'localtime') + END AS read_window + FROM activity + WHERE user_id = $user_id + AND CAST($window AS TEXT) = CAST($window AS TEXT) + GROUP BY read_window + ORDER BY read_window DESC +), +partitions AS ( + SELECT + document_windows.*, + row_number() OVER ( + PARTITION BY 1 ORDER BY read_window DESC + ) AS seqnum + FROM document_windows +), +streaks AS ( + SELECT + count(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date + FROM partitions + GROUP BY CASE + WHEN ?2 = "DAY" THEN date(read_window, '+' || seqnum || ' day') + WHEN ?2 = "WEEK" THEN date(read_window, '+' || (seqnum * 7) || ' 
day') + END + ORDER BY end_date DESC +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date + FROM streaks +) +SELECT + CAST(max_streak AS INTEGER), + CAST(max_streak_start_date AS TEXT), + CAST(max_streak_end_date AS TEXT), + streak AS current_streak, + CAST(start_date AS TEXT) AS current_streak_start_date, + CAST(end_date AS TEXT) AS current_streak_end_date +FROM max_streak, streaks LIMIT 1; + +-- name: GetDatabaseInfo :one +SELECT + (SELECT count(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size, + (SELECT count(rowid) FROM documents) AS documents_size, + (SELECT count(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size, + (SELECT count(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size +LIMIT 1; + +-- name: GetDailyReadStats :many +WITH RECURSIVE last_30_days (date) AS ( + SELECT date('now') AS date + UNION ALL + SELECT date(date, '-1 days') + FROM last_30_days + LIMIT 30 +), +activity_records AS ( + SELECT + sum(duration) AS seconds_read, + date(start_time, 'localtime') AS day + FROM activity + WHERE user_id = $user_id + GROUP BY day + ORDER BY day DESC + LIMIT 30 +) +SELECT + CAST(date AS TEXT), + CAST(CASE + WHEN seconds_read IS NULL THEN 0 + ELSE seconds_read / 60 + END AS INTEGER) AS minutes_read +FROM last_30_days +LEFT JOIN activity_records ON activity_records.day == last_30_days.date +ORDER BY date DESC +LIMIT 30; + +-- SELECT +-- sum(duration) / 60 AS minutes_read, +-- date(start_time, 'localtime') AS day +-- FROM activity +-- GROUP BY day +-- ORDER BY day DESC +-- LIMIT 10; diff --git a/database/query.sql.go b/database/query.sql.go new file mode 100644 index 0000000..ac38066 --- /dev/null +++ b/database/query.sql.go @@ -0,0 +1,1263 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.21.0 +// source: query.sql + +package database + +import ( + "context" + "database/sql" + "strings" + "time" +) + +const addActivity = `-- name: AddActivity :one +INSERT INTO activity ( + user_id, + document_id, + device_id, + start_time, + duration, + current_page, + total_pages +) +VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING id, user_id, document_id, device_id, start_time, duration, current_page, total_pages, created_at +` + +type AddActivityParams struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + StartTime time.Time `json:"start_time"` + Duration int64 `json:"duration"` + CurrentPage int64 `json:"current_page"` + TotalPages int64 `json:"total_pages"` +} + +func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) { + row := q.db.QueryRowContext(ctx, addActivity, + arg.UserID, + arg.DocumentID, + arg.DeviceID, + arg.StartTime, + arg.Duration, + arg.CurrentPage, + arg.TotalPages, + ) + var i Activity + err := row.Scan( + &i.ID, + &i.UserID, + &i.DocumentID, + &i.DeviceID, + &i.StartTime, + &i.Duration, + &i.CurrentPage, + &i.TotalPages, + &i.CreatedAt, + ) + return i, err +} + +const createUser = `-- name: CreateUser :execrows +INSERT INTO users (id, pass) +VALUES (?, ?) 
+ON CONFLICT DO NOTHING +` + +type CreateUserParams struct { + ID string `json:"id"` + Pass string `json:"-"` +} + +func (q *Queries) CreateUser(ctx context.Context, arg CreateUserParams) (int64, error) { + result, err := q.db.ExecContext(ctx, createUser, arg.ID, arg.Pass) + if err != nil { + return 0, err + } + return result.RowsAffected() +} + +const deleteDocument = `-- name: DeleteDocument :execrows +UPDATE documents +SET + deleted = 1 +WHERE id = ?1 +` + +func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error) { + result, err := q.db.ExecContext(ctx, deleteDocument, id) + if err != nil { + return 0, err + } + return result.RowsAffected() +} + +const getActivity = `-- name: GetActivity :many +SELECT id, user_id, document_id, device_id, start_time, duration, current_page, total_pages, created_at FROM activity +WHERE + user_id = ?1 + AND ( + (?2 = TRUE AND document_id = ?3) + OR ?2 = FALSE + ) +ORDER BY start_time DESC +LIMIT ?5 +OFFSET ?4 +` + +type GetActivityParams struct { + UserID string `json:"user_id"` + DocFilter interface{} `json:"doc_filter"` + DocumentID string `json:"document_id"` + Offset int64 `json:"offset"` + Limit int64 `json:"limit"` +} + +func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Activity, error) { + rows, err := q.db.QueryContext(ctx, getActivity, + arg.UserID, + arg.DocFilter, + arg.DocumentID, + arg.Offset, + arg.Limit, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Activity + for rows.Next() { + var i Activity + if err := rows.Scan( + &i.ID, + &i.UserID, + &i.DocumentID, + &i.DeviceID, + &i.StartTime, + &i.Duration, + &i.CurrentPage, + &i.TotalPages, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getDailyReadStats = `-- name: GetDailyReadStats :many +WITH RECURSIVE last_30_days (date) AS ( + SELECT date('now') AS date + UNION ALL + SELECT date(date, '-1 days') + FROM last_30_days + LIMIT 30 +), +activity_records AS ( + SELECT + sum(duration) AS seconds_read, + date(start_time, 'localtime') AS day + FROM activity + WHERE user_id = ?1 + GROUP BY day + ORDER BY day DESC + LIMIT 30 +) +SELECT + CAST(date AS TEXT), + CAST(CASE + WHEN seconds_read IS NULL THEN 0 + ELSE seconds_read / 60 + END AS INTEGER) AS minutes_read +FROM last_30_days +LEFT JOIN activity_records ON activity_records.day == last_30_days.date +ORDER BY date DESC +LIMIT 30 +` + +type GetDailyReadStatsRow struct { + Date string `json:"date"` + MinutesRead int64 `json:"minutes_read"` +} + +func (q *Queries) GetDailyReadStats(ctx context.Context, userID string) ([]GetDailyReadStatsRow, error) { + rows, err := q.db.QueryContext(ctx, getDailyReadStats, userID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetDailyReadStatsRow + for rows.Next() { + var i GetDailyReadStatsRow + if err := rows.Scan(&i.Date, &i.MinutesRead); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getDatabaseInfo = `-- name: GetDatabaseInfo :one +SELECT + (SELECT count(rowid) FROM activity WHERE activity.user_id = ?1) AS activity_size, + (SELECT count(rowid) FROM documents) AS documents_size, + (SELECT count(rowid) FROM document_progress WHERE 
document_progress.user_id = ?1) AS progress_size, + (SELECT count(rowid) FROM devices WHERE devices.user_id = ?1) AS devices_size +LIMIT 1 +` + +type GetDatabaseInfoRow struct { + ActivitySize int64 `json:"activity_size"` + DocumentsSize int64 `json:"documents_size"` + ProgressSize int64 `json:"progress_size"` + DevicesSize int64 `json:"devices_size"` +} + +func (q *Queries) GetDatabaseInfo(ctx context.Context, userID string) (GetDatabaseInfoRow, error) { + row := q.db.QueryRowContext(ctx, getDatabaseInfo, userID) + var i GetDatabaseInfoRow + err := row.Scan( + &i.ActivitySize, + &i.DocumentsSize, + &i.ProgressSize, + &i.DevicesSize, + ) + return i, err +} + +const getDeletedDocuments = `-- name: GetDeletedDocuments :many +SELECT documents.id +FROM documents +WHERE + documents.deleted = true + AND documents.id IN (/*SLICE:document_ids*/?) +` + +func (q *Queries) GetDeletedDocuments(ctx context.Context, documentIds []string) ([]string, error) { + query := getDeletedDocuments + var queryParams []interface{} + if len(documentIds) > 0 { + for _, v := range documentIds { + queryParams = append(queryParams, v) + } + query = strings.Replace(query, "/*SLICE:document_ids*/?", strings.Repeat(",?", len(documentIds))[1:], 1) + } else { + query = strings.Replace(query, "/*SLICE:document_ids*/?", "NULL", 1) + } + rows, err := q.db.QueryContext(ctx, query, queryParams...) + if err != nil { + return nil, err + } + defer rows.Close() + var items []string + for rows.Next() { + var id string + if err := rows.Scan(&id); err != nil { + return nil, err + } + items = append(items, id) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getDevice = `-- name: GetDevice :one +SELECT id, user_id, device_name, created_at, sync FROM devices +WHERE id = ?1 LIMIT 1 +` + +func (q *Queries) GetDevice(ctx context.Context, deviceID string) (Device, error) { + row := q.db.QueryRowContext(ctx, getDevice, deviceID) + var i Device + err := row.Scan( + &i.ID, + &i.UserID, + &i.DeviceName, + &i.CreatedAt, + &i.Sync, + ) + return i, err +} + +const getDevices = `-- name: GetDevices :many +SELECT id, user_id, device_name, created_at, sync FROM devices +WHERE user_id = ?1 +ORDER BY created_at DESC +LIMIT ?3 +OFFSET ?2 +` + +type GetDevicesParams struct { + UserID string `json:"user_id"` + Offset int64 `json:"offset"` + Limit int64 `json:"limit"` +} + +func (q *Queries) GetDevices(ctx context.Context, arg GetDevicesParams) ([]Device, error) { + rows, err := q.db.QueryContext(ctx, getDevices, arg.UserID, arg.Offset, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Device + for rows.Next() { + var i Device + if err := rows.Scan( + &i.ID, + &i.UserID, + &i.DeviceName, + &i.CreatedAt, + &i.Sync, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getDocument = `-- name: GetDocument :one +SELECT id, md5, filepath, title, author, series, series_index, lang, description, olid, synced, deleted, updated_at, created_at FROM documents +WHERE id = ?1 LIMIT 1 +` + +func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document, error) { + row := q.db.QueryRowContext(ctx, getDocument, documentID) + var i Document + err := row.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + &i.SeriesIndex, + 
&i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + ) + return i, err +} + +const getDocumentDaysRead = `-- name: GetDocumentDaysRead :one +WITH document_days AS ( + SELECT date(start_time, 'localtime') AS dates + FROM rescaled_activity + WHERE document_id = ?1 + AND user_id = ?2 + GROUP BY dates +) +SELECT CAST(count(*) AS INTEGER) AS days_read +FROM document_days +` + +type GetDocumentDaysReadParams struct { + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` +} + +func (q *Queries) GetDocumentDaysRead(ctx context.Context, arg GetDocumentDaysReadParams) (int64, error) { + row := q.db.QueryRowContext(ctx, getDocumentDaysRead, arg.DocumentID, arg.UserID) + var days_read int64 + err := row.Scan(&days_read) + return days_read, err +} + +const getDocumentReadStats = `-- name: GetDocumentReadStats :one +SELECT + count(DISTINCT page) AS pages_read, + sum(duration) AS total_time +FROM rescaled_activity +WHERE document_id = ?1 +AND user_id = ?2 +AND start_time >= ?3 +` + +type GetDocumentReadStatsParams struct { + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` + StartTime time.Time `json:"start_time"` +} + +type GetDocumentReadStatsRow struct { + PagesRead int64 `json:"pages_read"` + TotalTime sql.NullFloat64 `json:"total_time"` +} + +func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadStatsParams) (GetDocumentReadStatsRow, error) { + row := q.db.QueryRowContext(ctx, getDocumentReadStats, arg.DocumentID, arg.UserID, arg.StartTime) + var i GetDocumentReadStatsRow + err := row.Scan(&i.PagesRead, &i.TotalTime) + return i, err +} + +const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one +WITH capped_stats AS ( + SELECT min(sum(duration), CAST(?1 AS INTEGER)) AS durations + FROM rescaled_activity + WHERE document_id = ?2 + AND user_id = ?3 + AND start_time >= ?4 + GROUP BY page +) +SELECT + CAST(count(*) AS INTEGER) AS pages_read, + CAST(sum(durations) AS INTEGER) AS total_time +FROM capped_stats +` + +type GetDocumentReadStatsCappedParams struct { + PageDurationCap int64 `json:"page_duration_cap"` + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` + StartTime time.Time `json:"start_time"` +} + +type GetDocumentReadStatsCappedRow struct { + PagesRead int64 `json:"pages_read"` + TotalTime int64 `json:"total_time"` +} + +func (q *Queries) GetDocumentReadStatsCapped(ctx context.Context, arg GetDocumentReadStatsCappedParams) (GetDocumentReadStatsCappedRow, error) { + row := q.db.QueryRowContext(ctx, getDocumentReadStatsCapped, + arg.PageDurationCap, + arg.DocumentID, + arg.UserID, + arg.StartTime, + ) + var i GetDocumentReadStatsCappedRow + err := row.Scan(&i.PagesRead, &i.TotalTime) + return i, err +} + +const getDocuments = `-- name: GetDocuments :many +SELECT id, md5, filepath, title, author, series, series_index, lang, description, olid, synced, deleted, updated_at, created_at FROM documents +ORDER BY created_at DESC +LIMIT ?2 +OFFSET ?1 +` + +type GetDocumentsParams struct { + Offset int64 `json:"offset"` + Limit int64 `json:"limit"` +} + +func (q *Queries) GetDocuments(ctx context.Context, arg GetDocumentsParams) ([]Document, error) { + rows, err := q.db.QueryContext(ctx, getDocuments, arg.Offset, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Document + for rows.Next() { + var i Document + if err := rows.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + 
&i.SeriesIndex, + &i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getDocumentsWithStats = `-- name: GetDocumentsWithStats :many +WITH true_progress AS ( + SELECT + start_time AS last_read, + SUM(duration) / 60 AS total_time_minutes, + document_id, + current_page, + total_pages, + ROUND(CAST(current_page AS REAL) / CAST(total_pages AS REAL) * 100, 2) AS percentage + FROM activity + WHERE user_id = ?3 + GROUP BY document_id + HAVING MAX(start_time) +) +SELECT + documents.id, documents.md5, documents.filepath, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.olid, documents.synced, documents.deleted, documents.updated_at, documents.created_at, + + CAST(IFNULL(current_page, 0) AS INTEGER) AS current_page, + CAST(IFNULL(total_pages, 0) AS INTEGER) AS total_pages, + CAST(IFNULL(total_time_minutes, 0) AS INTEGER) AS total_time_minutes, + + CAST( + STRFTIME('%Y-%m-%dT%H:%M:%SZ', IFNULL(last_read, "1970-01-01") + ) AS TEXT) AS last_read, + + CAST(CASE + WHEN percentage > 97.0 THEN 100.0 + WHEN percentage IS NULL THEN 0.0 + ELSE percentage + END AS REAL) AS percentage + +FROM documents +LEFT JOIN true_progress ON document_id = id +ORDER BY last_read DESC, created_at DESC +LIMIT ?2 +OFFSET ?1 +` + +type GetDocumentsWithStatsParams struct { + Offset int64 `json:"offset"` + Limit int64 `json:"limit"` + UserID string `json:"user_id"` +} + +type GetDocumentsWithStatsRow struct { + ID string `json:"id"` + Md5 *string `json:"md5"` + Filepath *string `json:"filepath"` + Title *string `json:"title"` + Author *string `json:"author"` + Series *string `json:"series"` + SeriesIndex *int64 `json:"series_index"` + Lang *string `json:"lang"` + Description *string `json:"description"` + Olid *string `json:"-"` + Synced bool `json:"-"` + Deleted bool `json:"-"` + UpdatedAt time.Time `json:"updated_at"` + CreatedAt time.Time `json:"created_at"` + CurrentPage int64 `json:"current_page"` + TotalPages int64 `json:"total_pages"` + TotalTimeMinutes int64 `json:"total_time_minutes"` + LastRead string `json:"last_read"` + Percentage float64 `json:"percentage"` +} + +func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) { + rows, err := q.db.QueryContext(ctx, getDocumentsWithStats, arg.Offset, arg.Limit, arg.UserID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetDocumentsWithStatsRow + for rows.Next() { + var i GetDocumentsWithStatsRow + if err := rows.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + &i.SeriesIndex, + &i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + &i.CurrentPage, + &i.TotalPages, + &i.TotalTimeMinutes, + &i.LastRead, + &i.Percentage, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getLastActivity = `-- name: GetLastActivity :one +SELECT start_time +FROM activity +WHERE device_id = ?1 +AND user_id = ?2 +ORDER BY start_time DESC LIMIT 1 +` + +type GetLastActivityParams struct { + DeviceID string 
`json:"device_id"` + UserID string `json:"user_id"` +} + +func (q *Queries) GetLastActivity(ctx context.Context, arg GetLastActivityParams) (time.Time, error) { + row := q.db.QueryRowContext(ctx, getLastActivity, arg.DeviceID, arg.UserID) + var start_time time.Time + err := row.Scan(&start_time) + return start_time, err +} + +const getMissingDocuments = `-- name: GetMissingDocuments :many +SELECT documents.id, documents.md5, documents.filepath, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.olid, documents.synced, documents.deleted, documents.updated_at, documents.created_at FROM documents +WHERE + documents.filepath IS NOT NULL + AND documents.deleted = false + AND documents.id NOT IN (/*SLICE:document_ids*/?) +` + +func (q *Queries) GetMissingDocuments(ctx context.Context, documentIds []string) ([]Document, error) { + query := getMissingDocuments + var queryParams []interface{} + if len(documentIds) > 0 { + for _, v := range documentIds { + queryParams = append(queryParams, v) + } + query = strings.Replace(query, "/*SLICE:document_ids*/?", strings.Repeat(",?", len(documentIds))[1:], 1) + } else { + query = strings.Replace(query, "/*SLICE:document_ids*/?", "NULL", 1) + } + rows, err := q.db.QueryContext(ctx, query, queryParams...) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Document + for rows.Next() { + var i Document + if err := rows.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + &i.SeriesIndex, + &i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getProgress = `-- name: GetProgress :one +SELECT + document_progress.user_id, document_progress.document_id, document_progress.device_id, document_progress.percentage, document_progress.progress, document_progress.created_at, + devices.device_name +FROM document_progress +JOIN devices ON document_progress.device_id = devices.id +WHERE + document_progress.user_id = ?1 + AND document_progress.document_id = ?2 +ORDER BY + document_progress.created_at + DESC +LIMIT 1 +` + +type GetProgressParams struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` +} + +type GetProgressRow struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + Percentage float64 `json:"percentage"` + Progress string `json:"progress"` + CreatedAt time.Time `json:"created_at"` + DeviceName string `json:"device_name"` +} + +func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) (GetProgressRow, error) { + row := q.db.QueryRowContext(ctx, getProgress, arg.UserID, arg.DocumentID) + var i GetProgressRow + err := row.Scan( + &i.UserID, + &i.DocumentID, + &i.DeviceID, + &i.Percentage, + &i.Progress, + &i.CreatedAt, + &i.DeviceName, + ) + return i, err +} + +const getUser = `-- name: GetUser :one +SELECT id, pass, admin, created_at FROM users +WHERE id = ?1 LIMIT 1 +` + +func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) { + row := q.db.QueryRowContext(ctx, getUser, userID) + var i User + err := row.Scan( + &i.ID, + &i.Pass, + &i.Admin, + &i.CreatedAt, + ) + return i, err +} + +const getUserDayStreaks = `-- name: GetUserDayStreaks :one +WITH 
document_days AS ( + SELECT date(start_time, 'localtime') AS read_day + FROM activity + WHERE user_id = ?1 + GROUP BY read_day + ORDER BY read_day DESC +), +partitions AS ( + SELECT + document_days.read_day, + row_number() OVER ( + PARTITION BY 1 ORDER BY read_day DESC + ) AS seqnum + FROM document_days +), +streaks AS ( + SELECT + count(*) AS streak, + MIN(read_day) AS start_date, + MAX(read_day) AS end_date + FROM partitions + GROUP BY date(read_day, '+' || seqnum || ' day') + ORDER BY end_date DESC +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date + FROM streaks +) +SELECT + CAST(max_streak AS INTEGER), + CAST(max_streak_start_date AS TEXT), + CAST(max_streak_end_date AS TEXT), + streak AS current_streak, + CAST(start_date AS TEXT) AS current_streak_start_date, + CAST(end_date AS TEXT) AS current_streak_end_date +FROM max_streak, streaks LIMIT 1 +` + +type GetUserDayStreaksRow struct { + MaxStreak int64 `json:"max_streak"` + MaxStreakStartDate string `json:"max_streak_start_date"` + MaxStreakEndDate string `json:"max_streak_end_date"` + CurrentStreak int64 `json:"current_streak"` + CurrentStreakStartDate string `json:"current_streak_start_date"` + CurrentStreakEndDate string `json:"current_streak_end_date"` +} + +func (q *Queries) GetUserDayStreaks(ctx context.Context, userID string) (GetUserDayStreaksRow, error) { + row := q.db.QueryRowContext(ctx, getUserDayStreaks, userID) + var i GetUserDayStreaksRow + err := row.Scan( + &i.MaxStreak, + &i.MaxStreakStartDate, + &i.MaxStreakEndDate, + &i.CurrentStreak, + &i.CurrentStreakStartDate, + &i.CurrentStreakEndDate, + ) + return i, err +} + +const getUserWeekStreaks = `-- name: GetUserWeekStreaks :one +WITH document_weeks AS ( + SELECT STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') AS read_week + FROM activity + WHERE user_id = ?1 + GROUP BY read_week + ORDER BY read_week DESC +), +partitions AS ( + SELECT + document_weeks.read_week, + row_number() OVER ( + PARTITION BY 1 ORDER BY read_week DESC + ) AS seqnum + FROM document_weeks +), +streaks AS ( + SELECT + count(*) AS streak, + MIN(read_week) AS start_date, + MAX(read_week) AS end_date + FROM partitions + GROUP BY date(read_week, '+' || (seqnum * 7) || ' day') + ORDER BY end_date DESC +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date + FROM streaks +) +SELECT + CAST(max_streak AS INTEGER), + CAST(max_streak_start_date AS TEXT), + CAST(max_streak_end_date AS TEXT), + streak AS current_streak, + CAST(start_date AS TEXT) AS current_streak_start_date, + CAST(end_date AS TEXT) AS current_streak_end_date +FROM max_streak, streaks LIMIT 1 +` + +type GetUserWeekStreaksRow struct { + MaxStreak int64 `json:"max_streak"` + MaxStreakStartDate string `json:"max_streak_start_date"` + MaxStreakEndDate string `json:"max_streak_end_date"` + CurrentStreak int64 `json:"current_streak"` + CurrentStreakStartDate string `json:"current_streak_start_date"` + CurrentStreakEndDate string `json:"current_streak_end_date"` +} + +func (q *Queries) GetUserWeekStreaks(ctx context.Context, userID string) (GetUserWeekStreaksRow, error) { + row := q.db.QueryRowContext(ctx, getUserWeekStreaks, userID) + var i GetUserWeekStreaksRow + err := row.Scan( + &i.MaxStreak, + &i.MaxStreakStartDate, + &i.MaxStreakEndDate, + &i.CurrentStreak, + &i.CurrentStreakStartDate, + &i.CurrentStreakEndDate, + ) + return i, err +} + +const getUserWindowStreaks = 
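// Parameterized variant of the day/week streak queries above: ?2 ("DAY" or "WEEK") selects the
// bucketing expression, and the always-true CAST(?2 AS TEXT) = CAST(?2 AS TEXT) filter appears
// to exist only to give sqlc a concrete TEXT type for the parameter.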
`-- name: GetUserWindowStreaks :one +WITH document_windows AS ( + SELECT CASE + WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') + WHEN ?2 = "DAY" THEN date(start_time, 'localtime') + END AS read_window + FROM activity + WHERE user_id = ?1 + AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT) + GROUP BY read_window + ORDER BY read_window DESC +), +partitions AS ( + SELECT + document_windows.read_window, + row_number() OVER ( + PARTITION BY 1 ORDER BY read_window DESC + ) AS seqnum + FROM document_windows +), +streaks AS ( + SELECT + count(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date + FROM partitions + GROUP BY CASE + WHEN ?2 = "DAY" THEN date(read_window, '+' || seqnum || ' day') + WHEN ?2 = "WEEK" THEN date(read_window, '+' || (seqnum * 7) || ' day') + END + ORDER BY end_date DESC +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date + FROM streaks +) +SELECT + CAST(max_streak AS INTEGER), + CAST(max_streak_start_date AS TEXT), + CAST(max_streak_end_date AS TEXT), + streak AS current_streak, + CAST(start_date AS TEXT) AS current_streak_start_date, + CAST(end_date AS TEXT) AS current_streak_end_date +FROM max_streak, streaks LIMIT 1 +` + +type GetUserWindowStreaksParams struct { + UserID string `json:"user_id"` + Window string `json:"window"` +} + +type GetUserWindowStreaksRow struct { + MaxStreak int64 `json:"max_streak"` + MaxStreakStartDate string `json:"max_streak_start_date"` + MaxStreakEndDate string `json:"max_streak_end_date"` + CurrentStreak int64 `json:"current_streak"` + CurrentStreakStartDate string `json:"current_streak_start_date"` + CurrentStreakEndDate string `json:"current_streak_end_date"` +} + +func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) { + row := q.db.QueryRowContext(ctx, getUserWindowStreaks, arg.UserID, arg.Window) + var i GetUserWindowStreaksRow + err := row.Scan( + &i.MaxStreak, + &i.MaxStreakStartDate, + &i.MaxStreakEndDate, + &i.CurrentStreak, + &i.CurrentStreakStartDate, + &i.CurrentStreakEndDate, + ) + return i, err +} + +const getUsers = `-- name: GetUsers :many +SELECT id, pass, admin, created_at FROM users +WHERE + users.id = ?1 + OR ?1 IN ( + SELECT id + FROM users + WHERE id = ?1 + AND admin = 1 + ) +ORDER BY created_at DESC +LIMIT ?3 +OFFSET ?2 +` + +type GetUsersParams struct { + User string `json:"user"` + Offset int64 `json:"offset"` + Limit int64 `json:"limit"` +} + +func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, error) { + rows, err := q.db.QueryContext(ctx, getUsers, arg.User, arg.Offset, arg.Limit) + if err != nil { + return nil, err + } + defer rows.Close() + var items []User + for rows.Next() { + var i User + if err := rows.Scan( + &i.ID, + &i.Pass, + &i.Admin, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getWantedDocuments = `-- name: GetWantedDocuments :many +SELECT CAST(value AS TEXT) AS id +FROM json_each(?1) +LEFT JOIN documents +ON value = documents.id +WHERE ( + documents.id IS NOT NULL + AND documents.synced = false +) +OR (documents.id IS NULL) +OR CAST(?1 AS TEXT) != CAST(?1 AS TEXT) +` + +func (q *Queries) GetWantedDocuments(ctx context.Context, documentIds string) ([]string, error) { + rows, err 
:= q.db.QueryContext(ctx, getWantedDocuments, documentIds) + if err != nil { + return nil, err + } + defer rows.Close() + var items []string + for rows.Next() { + var id string + if err := rows.Scan(&id); err != nil { + return nil, err + } + items = append(items, id) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateDocumentDeleted = `-- name: UpdateDocumentDeleted :one +UPDATE documents +SET + deleted = ?1 +WHERE id = ?2 +RETURNING id, md5, filepath, title, author, series, series_index, lang, description, olid, synced, deleted, updated_at, created_at +` + +type UpdateDocumentDeletedParams struct { + Deleted bool `json:"-"` + ID string `json:"id"` +} + +func (q *Queries) UpdateDocumentDeleted(ctx context.Context, arg UpdateDocumentDeletedParams) (Document, error) { + row := q.db.QueryRowContext(ctx, updateDocumentDeleted, arg.Deleted, arg.ID) + var i Document + err := row.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + &i.SeriesIndex, + &i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + ) + return i, err +} + +const updateDocumentSync = `-- name: UpdateDocumentSync :one +UPDATE documents +SET + synced = ?1 +WHERE id = ?2 +RETURNING id, md5, filepath, title, author, series, series_index, lang, description, olid, synced, deleted, updated_at, created_at +` + +type UpdateDocumentSyncParams struct { + Synced bool `json:"-"` + ID string `json:"id"` +} + +func (q *Queries) UpdateDocumentSync(ctx context.Context, arg UpdateDocumentSyncParams) (Document, error) { + row := q.db.QueryRowContext(ctx, updateDocumentSync, arg.Synced, arg.ID) + var i Document + err := row.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + &i.SeriesIndex, + &i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + ) + return i, err +} + +const updateProgress = `-- name: UpdateProgress :one +INSERT OR REPLACE INTO document_progress ( + user_id, + document_id, + device_id, + percentage, + progress +) +VALUES (?, ?, ?, ?, ?) +RETURNING user_id, document_id, device_id, percentage, progress, created_at +` + +type UpdateProgressParams struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + Percentage float64 `json:"percentage"` + Progress string `json:"progress"` +} + +func (q *Queries) UpdateProgress(ctx context.Context, arg UpdateProgressParams) (DocumentProgress, error) { + row := q.db.QueryRowContext(ctx, updateProgress, + arg.UserID, + arg.DocumentID, + arg.DeviceID, + arg.Percentage, + arg.Progress, + ) + var i DocumentProgress + err := row.Scan( + &i.UserID, + &i.DocumentID, + &i.DeviceID, + &i.Percentage, + &i.Progress, + &i.CreatedAt, + ) + return i, err +} + +const upsertDevice = `-- name: UpsertDevice :one +INSERT INTO devices (id, user_id, device_name) +VALUES (?, ?, ?) 
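+-- on an id conflict, update in place; COALESCE keeps the stored device_name when the incoming one is NULL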
+ON CONFLICT DO UPDATE +SET + device_name = COALESCE(excluded.device_name, device_name) +RETURNING id, user_id, device_name, created_at, sync +` + +type UpsertDeviceParams struct { + ID string `json:"id"` + UserID string `json:"user_id"` + DeviceName string `json:"device_name"` +} + +func (q *Queries) UpsertDevice(ctx context.Context, arg UpsertDeviceParams) (Device, error) { + row := q.db.QueryRowContext(ctx, upsertDevice, arg.ID, arg.UserID, arg.DeviceName) + var i Device + err := row.Scan( + &i.ID, + &i.UserID, + &i.DeviceName, + &i.CreatedAt, + &i.Sync, + ) + return i, err +} + +const upsertDocument = `-- name: UpsertDocument :one +INSERT INTO documents ( + id, + md5, + filepath, + title, + author, + series, + series_index, + lang, + description, + olid +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +ON CONFLICT DO UPDATE +SET + md5 = COALESCE(excluded.md5, md5), + filepath = COALESCE(excluded.filepath, filepath), + title = COALESCE(excluded.title, title), + author = COALESCE(excluded.author, author), + series = COALESCE(excluded.series, series), + series_index = COALESCE(excluded.series_index, series_index), + lang = COALESCE(excluded.lang, lang), + description = COALESCE(excluded.description, description), + olid = COALESCE(excluded.olid, olid) +RETURNING id, md5, filepath, title, author, series, series_index, lang, description, olid, synced, deleted, updated_at, created_at +` + +type UpsertDocumentParams struct { + ID string `json:"id"` + Md5 *string `json:"md5"` + Filepath *string `json:"filepath"` + Title *string `json:"title"` + Author *string `json:"author"` + Series *string `json:"series"` + SeriesIndex *int64 `json:"series_index"` + Lang *string `json:"lang"` + Description *string `json:"description"` + Olid *string `json:"-"` +} + +func (q *Queries) UpsertDocument(ctx context.Context, arg UpsertDocumentParams) (Document, error) { + row := q.db.QueryRowContext(ctx, upsertDocument, + arg.ID, + arg.Md5, + arg.Filepath, + arg.Title, + arg.Author, + arg.Series, + arg.SeriesIndex, + arg.Lang, + arg.Description, + arg.Olid, + ) + var i Document + err := row.Scan( + &i.ID, + &i.Md5, + &i.Filepath, + &i.Title, + &i.Author, + &i.Series, + &i.SeriesIndex, + &i.Lang, + &i.Description, + &i.Olid, + &i.Synced, + &i.Deleted, + &i.UpdatedAt, + &i.CreatedAt, + ) + return i, err +} diff --git a/database/schema.sql b/database/schema.sql new file mode 100644 index 0000000..f0e895f --- /dev/null +++ b/database/schema.sql @@ -0,0 +1,156 @@ +PRAGMA foreign_keys = ON; + +-- Authentication +CREATE TABLE IF NOT EXISTS users ( + id TEXT NOT NULL PRIMARY KEY, + + pass TEXT NOT NULL, + admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)), + + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- Books / Documents +CREATE TABLE IF NOT EXISTS documents ( + id TEXT NOT NULL PRIMARY KEY, + + md5 TEXT, + filepath TEXT, + title TEXT, + author TEXT, + series TEXT, + series_index INTEGER, + lang TEXT, + description TEXT, + olid TEXT, + synced BOOLEAN NOT NULL DEFAULT 0 CHECK (synced IN (0, 1)), + deleted BOOLEAN NOT NULL DEFAULT 0 CHECK (deleted IN (0, 1)), + + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- Devices +CREATE TABLE IF NOT EXISTS devices ( + id TEXT NOT NULL PRIMARY KEY, + user_id TEXT NOT NULL, + + device_name TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)), + + FOREIGN KEY (user_id) REFERENCES users (id) +); + +-- Document Device 
Sync +CREATE TABLE IF NOT EXISTS document_device_sync ( + user_id TEXT NOT NULL, + document_id TEXT NOT NULL, + device_id TEXT NOT NULL, + + last_synced DATETIME NOT NULL, + sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)), + + FOREIGN KEY (user_id) REFERENCES users (id), + FOREIGN KEY (document_id) REFERENCES documents (id), + FOREIGN KEY (device_id) REFERENCES devices (id), + PRIMARY KEY (user_id, document_id, device_id) +); + +-- User Document Progress +CREATE TABLE IF NOT EXISTS document_progress ( + user_id TEXT NOT NULL, + document_id TEXT NOT NULL, + device_id TEXT NOT NULL, + + percentage REAL NOT NULL, + progress TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + + FOREIGN KEY (user_id) REFERENCES users (id), + FOREIGN KEY (document_id) REFERENCES documents (id), + FOREIGN KEY (device_id) REFERENCES devices (id), + PRIMARY KEY (user_id, document_id, device_id) +); + +-- Read Activity +CREATE TABLE IF NOT EXISTS activity ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id TEXT NOT NULL, + document_id TEXT NOT NULL, + device_id TEXT NOT NULL, + + start_time DATETIME NOT NULL, + duration INTEGER NOT NULL, + current_page INTEGER NOT NULL, + total_pages INTEGER NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + + FOREIGN KEY (user_id) REFERENCES users (id), + FOREIGN KEY (document_id) REFERENCES documents (id), + FOREIGN KEY (device_id) REFERENCES devices (id) +); + +-- Update Trigger +CREATE TRIGGER IF NOT EXISTS update_documents_updated_at +BEFORE UPDATE ON documents BEGIN +UPDATE documents +SET updated_at = CURRENT_TIMESTAMP +WHERE id = old.id; +END; + +-- Rescaled Activity View (Adapted from KOReader) +CREATE VIEW IF NOT EXISTS rescaled_activity AS + +WITH RECURSIVE numbers (idx) AS ( + SELECT 1 AS idx + UNION ALL + SELECT idx + 1 + FROM numbers + LIMIT 1000 +), + +total_pages AS ( + SELECT + document_id, + total_pages AS pages + FROM activity + GROUP BY document_id + HAVING MAX(start_time) + ORDER BY start_time DESC +), + +intermediate AS ( + SELECT + activity.document_id, + activity.device_id, + activity.user_id, + activity.current_page, + activity.total_pages, + total_pages.pages, + activity.start_time, + activity.duration, + numbers.idx, + -- Derive First Page + ((activity.current_page - 1) * total_pages.pages) / activity.total_pages + + 1 AS first_page, + -- Derive Last Page + MAX( + ((activity.current_page - 1) * total_pages.pages) + / activity.total_pages + + 1, + (activity.current_page * total_pages.pages) / activity.total_pages + ) AS last_page + FROM activity + INNER JOIN total_pages ON total_pages.document_id = activity.document_id + INNER JOIN numbers ON numbers.idx <= (last_page - first_page + 1) +) + +SELECT + document_id, + device_id, + user_id, + start_time, + first_page + idx - 1 AS page, + duration / (last_page - first_page + 1) AS duration +FROM intermediate; diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..cc27106 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,12 @@ +--- +services: + sync-ninja: + # working_dir: /app + environment: + - CONFIG_PATH=/data + - DATA_PATH=/data + ports: + - "8585:8585" + build: . 
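+ # ./data on the host is mounted at /data, which CONFIG_PATH and DATA_PATH point at,
+ # so application state survives container rebuilds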
+ volumes: + - ./data:/data diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..5295484 --- /dev/null +++ b/go.mod @@ -0,0 +1,48 @@ +module reichard.io/bbank + +go 1.19 + +require ( + github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736 + github.com/gabriel-vasile/mimetype v1.4.2 + github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271 + github.com/gin-contrib/sessions v0.0.4 + github.com/gin-gonic/gin v1.9.1 + github.com/mattn/go-sqlite3 v1.14.17 + github.com/sirupsen/logrus v1.9.3 + github.com/urfave/cli/v2 v2.25.7 + golang.org/x/exp v0.0.0-20230905200255-921286631fa9 +) + +require ( + github.com/bytedance/sonic v1.10.0 // indirect + github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect + github.com/chenzhuoyu/iasm v0.9.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.15.3 // indirect + github.com/goccy/go-json v0.10.2 // indirect + github.com/gorilla/context v1.1.1 // indirect + github.com/gorilla/securecookie v1.1.1 // indirect + github.com/gorilla/sessions v1.2.1 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/cpuid/v2 v2.2.5 // indirect + github.com/leodido/go-urn v1.2.4 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.11 // indirect + github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect + golang.org/x/arch v0.4.0 // indirect + golang.org/x/crypto v0.12.0 // indirect + golang.org/x/net v0.14.0 // indirect + golang.org/x/sys v0.12.0 // indirect + golang.org/x/text v0.12.0 // indirect + google.golang.org/protobuf v1.31.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..22cccd5 --- /dev/null +++ b/go.sum @@ -0,0 +1,217 @@ +github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736 h1:qZaEtLxnqY5mJ0fVKbk31NVhlgi0yrKm51Pq/I5wcz4= +github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736/go.mod h1:mTeFRcTdnpzOlRjMoFYC/80HwVUreupyAiqPkCZQOXc= +github.com/antonlindstrom/pgstore v0.0.0-20200229204646-b08ebf1105e0/go.mod h1:2Ti6VUHVxpC0VSmTZzEvpzysnaGAfGBOoMIz5ykPyyw= +github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff/go.mod h1:+RTT1BOk5P97fT2CiHkbFQwkK3mjsFAP6zCYV2aXtjw= +github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= +github.com/bradleypeabody/gorilla-sessions-memcache v0.0.0-20181103040241-659414f458e1/go.mod h1:dkChI7Tbtx7H1Tj7TqGSZMOeGpMP5gLHtjroHd4agiI= +github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= +github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= +github.com/bytedance/sonic v1.10.0 h1:qtNZduETEIWJVIyDl01BeNxur2rW9OwTQ/yBqFRkKEk= +github.com/bytedance/sonic v1.10.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= 
+github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= +github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= +github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= +github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo= +github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271 h1:s+boMV47gwTyff2PL+k6V33edJpp+K5y3QPzZlRhno8= +github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271/go.mod h1:XLLtIXoP9+9zGcEDc7gAGV3AksGPO+vzv4kXHMJSdU0= +github.com/gin-contrib/sessions v0.0.4 h1:gq4fNa1Zmp564iHP5G6EBuktilEos8VKhe2sza1KMgo= +github.com/gin-contrib/sessions v0.0.4/go.mod h1:pQ3sIyviBBGcxgyR8mkeJuXbeV3h3NYmhJADQTq5+Vo= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= +github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= +github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= +github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= 
+github.com/go-playground/validator/v10 v10.15.3 h1:S+sSpunYjNPDuXkWbK+x+bA7iXiW296KG4dL3X7xUZo= +github.com/go-playground/validator/v10 v10.15.3/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8= +github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.1.1/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= +github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kidstuff/mongostore v0.0.0-20181113001930-e650cd85ee4b/go.mod h1:g2nVr8KZVXJSS97Jo8pJ0jgq29P6H7dG0oplUA86MQw= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= +github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= +github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= 
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= +github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/memcachier/mc v2.0.1+incompatible/go.mod h1:7bkvFE61leUBvXz+yxsOnGBQSZpBSPIMUQSmmSHvuXc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= +github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b/go.mod h1:wTPjTepVu7uJBYgZ0SdWHQlIas582j6cn2jgk4DDdlg= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify 
v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= +github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= +github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs= +github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ= +github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU= +github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc= +golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.14.0 
h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/graph/graph.go b/graph/graph.go new file mode 100644 index 0000000..df13b18 --- /dev/null +++ b/graph/graph.go @@ -0,0 +1,173 @@ +package graph + +import ( + "fmt" + "math" + + "reichard.io/bbank/database" +) + +type SVGGraphPoint struct { + X int + Y int + Size int +} + +type SVGGraphData struct { + Height int + Width int + Offset int + LinePoints []SVGGraphPoint + BarPoints []SVGGraphPoint + BezierPath string + BezierFill string +} + +type SVGBezierOpposedLine struct { + Length int + Angle int +} + +func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SVGGraphData { + // Static Padding + var padding int = 5 + + // Derive Height + var maxHeight int = 0 + for _, item := range inputData { + if int(item.MinutesRead) > maxHeight { + maxHeight = int(item.MinutesRead) + } + } + + // Derive Block Offsets & Transformed Coordinates (Line & Bar) + var blockOffset int = int(math.Floor(float64(svgWidth) / float64(len(inputData)))) + + // Line & Bar Points + linePoints := []SVGGraphPoint{} + barPoints := []SVGGraphPoint{} + + // Bezier Fill Coordinates (Max X, Min X, Max Y) + var maxBX int = 0 + var maxBY int = 0 + var minBX int = 0 + for idx, item := range inputData { + itemSize := int(item.MinutesRead) + itemY := (maxHeight + padding) - itemSize + barPoints = append(barPoints, SVGGraphPoint{ + X: (idx * blockOffset) + (blockOffset / 2), + Y: itemY, + Size: itemSize + padding, + }) + + lineX := (idx + 1) * blockOffset + linePoints = append(linePoints, SVGGraphPoint{ + X: lineX, + Y: itemY, + Size: itemSize + padding, + }) + + if lineX > maxBX { + maxBX = lineX + } + + if lineX < minBX { + minBX = lineX + } + + if itemY > maxBY { + maxBY = itemY + } + } + + // Return Data + return SVGGraphData{ + Width: svgWidth + padding*2, + Height: maxHeight + padding*2, + Offset: blockOffset, + LinePoints: linePoints, + BarPoints: barPoints, + BezierPath: 
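+ // getSVGBezierPath (below) turns the line points into one smoothed cubic-Bezier SVG path ("M x,y C ..." segments)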
getSVGBezierPath(linePoints), + BezierFill: fmt.Sprintf("L %d,%d L %d,%d Z", maxBX, maxBY+padding, minBX, maxBY+padding), + } +} + +func getSVGBezierOpposedLine(pointA SVGGraphPoint, pointB SVGGraphPoint) SVGBezierOpposedLine { + lengthX := float64(pointB.X - pointA.X) + lengthY := float64(pointB.Y - pointA.Y) + + return SVGBezierOpposedLine{ + Length: int(math.Sqrt(math.Pow(lengthX, 2) + math.Pow(lengthY, 2))), + Angle: int(math.Atan2(lengthY, lengthX)), + } + + // length = Math.sqrt(Math.pow(lengthX, 2) + Math.pow(lengthY, 2)), + // angle = Math.atan2(lengthY, lengthX) +} + +func getSVGBezierControlPoint(currentPoint *SVGGraphPoint, prevPoint *SVGGraphPoint, nextPoint *SVGGraphPoint, isReverse bool) SVGGraphPoint { + // First / Last Point + if prevPoint == nil { + prevPoint = currentPoint + } + if nextPoint == nil { + nextPoint = currentPoint + } + + // Modifiers + var smoothingRatio float64 = 0.2 + var directionModifier float64 = 0 + if isReverse == true { + directionModifier = math.Pi + } + + opposingLine := getSVGBezierOpposedLine(*prevPoint, *nextPoint) + var lineAngle float64 = float64(opposingLine.Angle) + directionModifier + var lineLength float64 = float64(opposingLine.Length) * smoothingRatio + + // Calculate Control Point + return SVGGraphPoint{ + X: currentPoint.X + int(math.Cos(float64(lineAngle))*lineLength), + Y: currentPoint.Y + int(math.Sin(float64(lineAngle))*lineLength), + } +} + +func getSVGBezierCurve(point SVGGraphPoint, index int, allPoints []SVGGraphPoint) []SVGGraphPoint { + var pointMinusTwo *SVGGraphPoint + var pointMinusOne *SVGGraphPoint + var pointPlusOne *SVGGraphPoint + + if index-2 >= 0 && index-2 < len(allPoints) { + pointMinusTwo = &allPoints[index-2] + } + if index-1 >= 0 && index-1 < len(allPoints) { + pointMinusOne = &allPoints[index-1] + } + if index+1 >= 0 && index+1 < len(allPoints) { + pointPlusOne = &allPoints[index+1] + } + + startControlPoint := getSVGBezierControlPoint(pointMinusOne, pointMinusTwo, &point, false) + endControlPoint := getSVGBezierControlPoint(&point, pointMinusOne, pointPlusOne, true) + + return []SVGGraphPoint{ + startControlPoint, + endControlPoint, + point, + } +} + +func getSVGBezierPath(allPoints []SVGGraphPoint) string { + var bezierSVGPath string = "" + + for index, point := range allPoints { + if index == 0 { + bezierSVGPath += fmt.Sprintf("M %d,%d", point.X, point.Y) + } else { + newPoints := getSVGBezierCurve(point, index, allPoints) + bezierSVGPath += fmt.Sprintf(" C%d,%d %d,%d %d,%d", newPoints[0].X, newPoints[0].Y, newPoints[1].X, newPoints[1].Y, newPoints[2].X, newPoints[2].Y) + } + } + + return bezierSVGPath +} diff --git a/metadata/metadata.go b/metadata/metadata.go new file mode 100644 index 0000000..46d6709 --- /dev/null +++ b/metadata/metadata.go @@ -0,0 +1,104 @@ +package metadata + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + + log "github.com/sirupsen/logrus" +) + +type coverResult struct { + CoverEditionKey string `json:"cover_edition_key"` +} + +type queryResponse struct { + ResultCount int `json:"numFound"` + Start int `json:"start"` + ResultCountExact bool `json:"numFoundExact"` + Results []coverResult `json:"docs"` +} + +var BASE_QUERY_URL string = "https://openlibrary.org/search.json?q=%s&fields=cover_edition_key" +var BASE_COVER_URL string = "https://covers.openlibrary.org/b/olid/%s-L.jpg" + +func GetCoverIDs(title *string, author *string) ([]string, error) { + if title == nil || author == nil { + log.Error("[metadata] Invalid Search 
Query") + return nil, errors.New("Invalid Query") + } + + searchQuery := url.QueryEscape(fmt.Sprintf("%s %s", *title, *author)) + apiQuery := fmt.Sprintf(BASE_QUERY_URL, searchQuery) + + log.Info("[metadata] Acquiring CoverID") + resp, err := http.Get(apiQuery) + if err != nil { + log.Error("[metadata] Cover URL API Failure") + return nil, errors.New("API Failure") + } + defer resp.Body.Close() + + target := queryResponse{} + err = json.NewDecoder(resp.Body).Decode(&target) + if err != nil { + log.Error("[metadata] Cover URL API Decode Failure") + return nil, errors.New("API Failure") + } + + var coverIDs []string + for _, result := range target.Results { + if result.CoverEditionKey != "" { + coverIDs = append(coverIDs, result.CoverEditionKey) + } + } + + return coverIDs, nil +} + +func DownloadAndSaveCover(coverID string, dirPath string) (*string, error) { + // Derive & Sanitize File Name + fileName := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", coverID)) + + // Generate Storage Path + safePath := filepath.Join(dirPath, "covers", fileName) + + // Validate File Doesn't Exist + _, err := os.Stat(safePath) + if err == nil { + log.Warn("[metadata] File Already Exists") + return &safePath, nil + } + + // Create File + out, err := os.Create(safePath) + if err != nil { + log.Error("[metadata] File Create Error") + return nil, errors.New("File Failure") + } + defer out.Close() + + // Download File + log.Info("[metadata] Downloading Cover") + coverURL := fmt.Sprintf(BASE_COVER_URL, coverID) + resp, err := http.Get(coverURL) + if err != nil { + log.Error("[metadata] Cover URL API Failure") + return nil, errors.New("API Failure") + } + defer resp.Body.Close() + + // Copy File to Disk + _, err = io.Copy(out, resp.Body) + if err != nil { + log.Error("[metadata] File Copy Error") + return nil, errors.New("File Failure") + } + + // Return FilePath + return &safePath, nil +}
diff --git a/screenshots/documents.png b/screenshots/documents.png new file mode 100644 index 0000000..85918da Binary files /dev/null and b/screenshots/documents.png differ diff --git a/screenshots/home.png b/screenshots/home.png new file mode 100644 index 0000000..0957c91 Binary files /dev/null and b/screenshots/home.png differ diff --git a/screenshots/login.png b/screenshots/login.png new file mode 100644 index 0000000..8f03e62 Binary files /dev/null and b/screenshots/login.png differ
diff --git a/server/server.go b/server/server.go new file mode 100644 index 0000000..a927277 --- /dev/null +++ b/server/server.go @@ -0,0 +1,62 @@ +package server + +import ( + "context" + "net/http" + "os" + "path/filepath" + "time" + + log "github.com/sirupsen/logrus" + + "reichard.io/bbank/api" + "reichard.io/bbank/config" + "reichard.io/bbank/database" +) + +type Server struct { + API *api.API + Config *config.Config + Database *database.DBManager + httpServer *http.Server +} + +func NewServer() *Server { + c := config.Load() + db := database.NewMgr(c) + api := api.NewApi(db, c) + + // Create Paths + docDir := filepath.Join(c.DataPath, "documents") + coversDir := filepath.Join(c.DataPath, "covers") + _ = os.Mkdir(docDir, os.ModePerm) + _ = os.Mkdir(coversDir, os.ModePerm) + + return &Server{ + API: api, + Config: c, + Database: db, + } +} + +func (s *Server) StartServer() { + listenAddr := (":" + s.Config.ListenPort) + + s.httpServer = &http.Server{ + Handler: s.API.Router, + Addr: listenAddr, + } + + go func() { + err := s.httpServer.ListenAndServe() + if err != nil { + log.Error("Error starting server ", err) + } + }() +} + +func (s *Server) StopServer() {
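+ // give in-flight requests up to five seconds to drain before exiting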
diff --git a/screenshots/documents.png b/screenshots/documents.png
new file mode 100644
index 0000000..85918da
Binary files /dev/null and b/screenshots/documents.png differ
diff --git a/screenshots/home.png b/screenshots/home.png
new file mode 100644
index 0000000..0957c91
Binary files /dev/null and b/screenshots/home.png differ
diff --git a/screenshots/login.png b/screenshots/login.png
new file mode 100644
index 0000000..8f03e62
Binary files /dev/null and b/screenshots/login.png differ
diff --git a/server/server.go b/server/server.go
new file mode 100644
index 0000000..a927277
--- /dev/null
+++ b/server/server.go
@@ -0,0 +1,62 @@
+package server
+
+import (
+	"context"
+	"net/http"
+	"os"
+	"path/filepath"
+	"time"
+
+	log "github.com/sirupsen/logrus"
+
+	"reichard.io/bbank/api"
+	"reichard.io/bbank/config"
+	"reichard.io/bbank/database"
+)
+
+type Server struct {
+	API        *api.API
+	Config     *config.Config
+	Database   *database.DBManager
+	httpServer *http.Server
+}
+
+func NewServer() *Server {
+	c := config.Load()
+	db := database.NewMgr(c)
+	api := api.NewApi(db, c)
+
+	// Create Paths
+	docDir := filepath.Join(c.DataPath, "documents")
+	coversDir := filepath.Join(c.DataPath, "covers")
+	_ = os.Mkdir(docDir, os.ModePerm)
+	_ = os.Mkdir(coversDir, os.ModePerm)
+
+	return &Server{
+		API:      api,
+		Config:   c,
+		Database: db,
+	}
+}
+
+func (s *Server) StartServer() {
+	listenAddr := ":" + s.Config.ListenPort
+
+	s.httpServer = &http.Server{
+		Handler: s.API.Router,
+		Addr:    listenAddr,
+	}
+
+	go func() {
+		err := s.httpServer.ListenAndServe()
+		if err != nil && err != http.ErrServerClosed {
+			log.Error("Error starting server ", err)
+		}
+	}()
+}
+
+func (s *Server) StopServer() {
+	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+	defer cancel()
+	if err := s.httpServer.Shutdown(ctx); err != nil {
+		log.Error("Error stopping server ", err)
+	}
+}
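
StartServer returns immediately because ListenAndServe runs on its own goroutine, so a caller has to block and then trigger the graceful shutdown itself. A hypothetical entrypoint (the signal wiring and main function are illustrative; only NewServer, StartServer, and StopServer come from this commit):

package main

import (
	"os"
	"os/signal"
	"syscall"

	"reichard.io/bbank/server"
)

func main() {
	s := server.NewServer()
	s.StartServer() // non-blocking; ListenAndServe runs in a goroutine

	// Block until SIGINT/SIGTERM, then shut down within the 5s grace period.
	c := make(chan os.Signal, 1)
	signal.Notify(c, syscall.SIGINT, syscall.SIGTERM)
	<-c
	s.StopServer()
}
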
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 0000000..2d7d103
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,8 @@
+{ pkgs ? import <nixpkgs> { } }:
+
+pkgs.mkShell {
+  packages = with pkgs; [
+    go
+    nodejs_20
+  ];
+}
diff --git a/sqlc.yaml b/sqlc.yaml
new file mode 100644
index 0000000..6f25df0
--- /dev/null
+++ b/sqlc.yaml
@@ -0,0 +1,60 @@
+version: "2"
+sql:
+  - engine: "sqlite"
+    schema: "./database/schema.sql"
+    queries: "./database/query.sql"
+    gen:
+      go:
+        package: "database"
+        out: "database"
+        emit_json_tags: true
+        overrides:
+          # Type pointers needed for JSON
+          - column: "documents.md5"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.filepath"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.title"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.author"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.series"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.series_index"
+            go_type:
+              type: "int64"
+              pointer: true
+          - column: "documents.lang"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.description"
+            go_type:
+              type: "string"
+              pointer: true
+          - column: "documents.olid"
+            go_type:
+              type: "string"
+              pointer: true
+
+          # Do not generate JSON
+          - column: "documents.synced"
+            go_struct_tag: 'json:"-"'
+          - column: "documents.olid"
+            go_struct_tag: 'json:"-"'
+          - column: "documents.deleted"
+            go_struct_tag: 'json:"-"'
+          - column: "users.pass"
+            go_struct_tag: 'json:"-"'
+          - column: "users.admin"
+            go_struct_tag: 'json:"-"'
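
For reference, the overrides above shape the generated model roughly as follows. This is a hand-written approximation, not actual sqlc output, and the Synced/Deleted types are guesses since schema.sql is not visible in this excerpt:

package database

// Approximation of the sqlc-generated Document model under this config.
// Pointer fields serialize as JSON null when absent; `json:"-"` fields are
// omitted from JSON entirely (so users.pass never leaks through the API).
type Document struct {
	Md5         *string `json:"md5"`
	Filepath    *string `json:"filepath"`
	Title       *string `json:"title"`
	Author      *string `json:"author"`
	Series      *string `json:"series"`
	SeriesIndex *int64  `json:"series_index"`
	Lang        *string `json:"lang"`
	Description *string `json:"description"`
	Olid        *string `json:"-"` // presumably both overrides apply: pointer type and hidden from JSON
	Synced      bool    `json:"-"` // type assumed; schema.sql not shown
	Deleted     bool    `json:"-"` // type assumed; schema.sql not shown
}
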
diff --git a/templates/activity.html b/templates/activity.html
new file mode 100644
index 0000000..0119372
--- /dev/null
+++ b/templates/activity.html
@@ -0,0 +1,4 @@
+{{template "base.html" .}} {{define "title"}}Activity{{end}} {{define
+"content"}}
+<p>Activity</p>
+{{end}}
diff --git a/templates/base.html b/templates/base.html
new file mode 100644
index 0000000..a6f3c5b
--- /dev/null
+++ b/templates/base.html
@@ -0,0 +1,202 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8" />
+    <meta name="viewport" content="width=device-width, initial-scale=1" />
+    <title>{{block "title" .}}{{end}}</title>
+    <!-- remaining head markup omitted -->
+  </head>
+  <body>
+    <header>
+      <!-- sidebar / navigation markup omitted -->
+      <p>{{block "title" .}}{{end}}</p>
+      <!-- nav link / button markup omitted -->
+    </header>
+    <main>
+      {{block "content" .}}{{end}}
+    </main>
+    <!-- script markup omitted -->
+  </body>
+</html>
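
base.html's {{block}} declarations and the children's matching {{define}}s rely on Go's html/template inheritance. A self-contained sketch of the render path (the file names are real; the wiring itself is illustrative, since the actual template setup lives in the api package, which is not part of this excerpt):

package main

import (
	"html/template"
	"os"
)

func main() {
	// Parsing base.html together with a child template lets the child's
	// {{define "title"}} / {{define "content"}} blocks override the
	// {{block ...}} defaults declared in base.html.
	tmpl := template.Must(template.ParseFiles(
		"templates/base.html",
		"templates/activity.html",
	))

	// Executing base.html renders the shared layout with the child's
	// overridden blocks filled in.
	_ = tmpl.ExecuteTemplate(os.Stdout, "base.html", nil)
}
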
diff --git a/templates/base.old.html b/templates/base.old.html
new file mode 100644
index 0000000..f5b5bfa
--- /dev/null
+++ b/templates/base.old.html
@@ -0,0 +1,182 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8" />
+    <title>{{block "title" .}}{{end}}</title>
+    <!-- remaining head markup omitted -->
+  </head>
+  <body>
+    <main>
+      {{block "content" .}}{{end}}
+    </main>
+    <!-- remaining markup omitted -->
+  </body>
+</html>
diff --git a/templates/documents.html b/templates/documents.html
new file mode 100644
index 0000000..de56a7c
--- /dev/null
+++ b/templates/documents.html
@@ -0,0 +1,51 @@
+{{template "base.html" .}} {{define "title"}}Documents{{end}} {{define
+"content"}}
+<div>
+  {{range $doc := .Data }}
+  <div>
+    <!-- cover image / link markup omitted -->
+    <div>
+      <p>Title</p>
+      <p>{{ or $doc.Title "Unknown" }}</p>
+    </div>
+    <div>
+      <p>Author</p>
+      <p>{{ or $doc.Author "Unknown" }}</p>
+    </div>
+    <div>
+      <p>Progress</p>
+      <p>{{ $doc.CurrentPage }} / {{ $doc.TotalPages }} ({{ $doc.Percentage }}%)</p>
+    </div>
+    <div>
+      <p>Minutes Read</p>
+      <p>{{ $doc.TotalTimeMinutes }} Minutes</p>
+    </div>
+  </div>
+  {{end}}
+</div>
+{{end}}
diff --git a/templates/graph.svg b/templates/graph.svg
new file mode 100644
index 0000000..a587c26
--- /dev/null
+++ b/templates/graph.svg
@@ -0,0 +1,35 @@
+<svg xmlns="http://www.w3.org/2000/svg">
+  {{ range $idx, $item := $data.BarPoints }}
+  <!-- per-bar markup omitted -->
+  {{ end }}
+  <!-- axis / legend markup omitted -->
+</svg>
diff --git a/templates/graphs.html b/templates/graphs.html
new file mode 100644
index 0000000..0b118e9
--- /dev/null
+++ b/templates/graphs.html
@@ -0,0 +1,3 @@
+{{template "base.html" .}} {{define "title"}}Graphs{{end}} {{define "content"}}
+<p>Graphs</p>
+{{end}}
diff --git a/templates/header.html b/templates/header.html
new file mode 100644
index 0000000..a314344
--- /dev/null
+++ b/templates/header.html
@@ -0,0 +1,162 @@
+<!-- header markup (scripts / icons) omitted -->
diff --git a/templates/home.html b/templates/home.html
new file mode 100644
index 0000000..2bf1e82
--- /dev/null
+++ b/templates/home.html
@@ -0,0 +1,235 @@
+{{template "base.html" .}} {{define "title"}}Home{{end}} {{define "content"}}
+<div>
+  <div>
+    <p>Daily Read Totals</p>
+    {{ $data := (GetSVGGraphData .Data.GraphData 800)}}
+    <svg xmlns="http://www.w3.org/2000/svg">
+      <!-- axis markup omitted -->
+      {{ range $index, $item := $data.LinePoints }}
+      <g>
+        <!-- point markup omitted -->
+        <text>{{ (index $.Data.GraphData $index).Date }}</text>
+        <text>{{ (index $.Data.GraphData $index).MinutesRead }} minutes</text>
+      </g>
+      {{ end }}
+    </svg>
+  </div>
+
+  <div>
+    <div>
+      <p>{{ .Data.DatabaseInfo.DocumentsSize }}</p>
+      <p>Documents</p>
+    </div>
+    <div>
+      <p>{{ .Data.DatabaseInfo.ActivitySize }}</p>
+      <p>Activity Records</p>
+    </div>
+    <div>
+      <p>{{ .Data.DatabaseInfo.ProgressSize }}</p>
+      <p>Progress Records</p>
+    </div>
+    <div>
+      <p>{{ .Data.DatabaseInfo.DevicesSize }}</p>
+      <p>Devices</p>
+    </div>
+  </div>
+
+  <div>
+    <p>Daily Read Streak</p>
+    <p>{{ .Data.DailyStreak.CurrentStreak }}</p>
+    <div>
+      <p>Current Daily Streak</p>
+      <p>{{ .Data.DailyStreak.CurrentStreakStartDate }} ➞ {{ .Data.DailyStreak.CurrentStreakEndDate }}</p>
+      <p>{{ .Data.DailyStreak.CurrentStreak }}</p>
+    </div>
+    <div>
+      <p>Best Daily Streak</p>
+      <p>{{ .Data.DailyStreak.MaxStreakStartDate }} ➞ {{ .Data.DailyStreak.MaxStreakEndDate }}</p>
+      <p>{{ .Data.DailyStreak.MaxStreak }}</p>
+    </div>
+  </div>
+
+  <div>
+    <p>Weekly Read Streak</p>
+    <p>{{ .Data.WeeklyStreak.CurrentStreak }}</p>
+    <div>
+      <p>Current Weekly Streak</p>
+      <p>{{ .Data.WeeklyStreak.CurrentStreakStartDate }} ➞ {{ .Data.WeeklyStreak.CurrentStreakEndDate }}</p>
+      <p>{{ .Data.WeeklyStreak.CurrentStreak }}</p>
+    </div>
+    <div>
+      <p>Best Weekly Streak</p>
+      <p>{{ .Data.WeeklyStreak.MaxStreakStartDate }} ➞ {{ .Data.WeeklyStreak.MaxStreakEndDate }}</p>
+      <p>{{ .Data.WeeklyStreak.MaxStreak }}</p>
+    </div>
+  </div>
+</div>
+{{end}}
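
GetSVGGraphData is not a template built-in; it has to be registered as a custom template function by the application. A sketch of how that registration could look (GraphPoint, SVGGraphData, and their fields are assumptions inferred from the template above; only the template-side call is from this commit):

package main

import "html/template"

// GraphPoint mirrors the fields the template reads back out of
// .Data.GraphData (Date, MinutesRead); the struct itself is assumed.
type GraphPoint struct {
	Date        string
	MinutesRead int64
}

// SVGGraphData carries precomputed coordinates for the chart markup.
type SVGGraphData struct {
	LinePoints []string // e.g. "x,y" pairs scaled to the requested width
	BarPoints  []string
}

// GetSVGGraphData scales the raw points into SVG coordinate space.
func GetSVGGraphData(points []GraphPoint, width int) SVGGraphData {
	data := SVGGraphData{}
	// ... scale each point's MinutesRead into the 0..width coordinate space ...
	return data
}

func main() {
	// Registering the helper makes {{ GetSVGGraphData ... }} resolvable.
	tmpl := template.New("home.html").Funcs(template.FuncMap{
		"GetSVGGraphData": GetSVGGraphData,
	})
	_ = tmpl // ParseFiles / ExecuteTemplate as usual
}
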
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <!-- head markup omitted -->
+  </head>
+  <body>
+    <main>
+      <p>Welcome.</p>
+      <form method="POST">
+        <!-- username / password input markup omitted -->
+        {{ .Error }}
+        <!-- submit button markup omitted -->
+      </form>
+      {{ if .Register }}
+      <p>
+        Trying to login?
+        <a>Login here.</a>
+      </p>
+      {{else}}
+      <p>
+        Don't have an account?
+        <a>Register here.</a>
+      </p>
+      {{end}}
+    </main>
+  </body>
+</html>
diff --git a/utils/utils.go b/utils/utils.go
new file mode 100644
index 0000000..9b2c9a1
--- /dev/null
+++ b/utils/utils.go
@@ -0,0 +1,49 @@
+package utils
+
+import (
+	"bytes"
+	"crypto/md5"
+	"fmt"
+	"io"
+	"os"
+)
+
+// CalculatePartialMD5 hashes 1024-byte samples of the file instead of its
+// entire contents: one sample at offset 0, then one at each offset
+// 1024 << (2 * i) for i = 0..10 (1KB, 4KB, 16KB, ... up to 1GB), stopping
+// early at EOF.
+func CalculatePartialMD5(filePath string) string {
+	file, err := os.Open(filePath)
+	if err != nil {
+		panic(err)
+	}
+	defer file.Close()
+
+	var step int64 = 1024
+	var size int64 = 1024
+	var buf bytes.Buffer
+
+	for i := -1; i <= 10; i++ {
+		byteStep := make([]byte, size)
+
+		var newOffset int64
+		if i == -1 {
+			newOffset = 0
+		} else {
+			newOffset = step << int64(i*2)
+		}
+
+		n, err := file.ReadAt(byteStep, newOffset)
+		buf.Write(byteStep[:n]) // keep partial samples from short reads
+		if err != nil {
+			break // io.EOF or a read error ends the sampling
+		}
+	}
+
+	allBytes := buf.Bytes()
+	return fmt.Sprintf("%x", md5.Sum(allBytes))
+}
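
A standalone sketch of calling the helper (the module import path is assumed from the imports in server.go; "test.epub" is a placeholder file name):

package main

import (
	"fmt"

	"reichard.io/bbank/utils" // assumed package path
)

func main() {
	// Because only ~12KB of samples are read regardless of file size, this
	// is a cheap fingerprint even for very large documents; files sharing
	// the same sampled regions hash identically.
	fileHash := utils.CalculatePartialMD5("test.epub")
	fmt.Println("MD5: ", fileHash)
}
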