package trantor

import (
	"fmt"
	"io"
	"net/http"
	"path"
	"strings"

	log "github.com/cihub/seelog"
	"github.com/gorilla/mux"
	"gitlab.com/trantor/trantor/lib/database"
)

const (
	booksFrontPage    = 6    // books shown on the front page
	daysNewsIndexpage = 15   // days of news shown on the index page
	cacheMaxAge       = 1800 // Cache-Control max-age, in seconds
	epubFile          = "book.epub"
)

// statusData is the template payload for simple static pages.
type statusData struct {
	S Status
}

func aboutHandler(h handler) {
	var data statusData
	data.S = GetStatus(h)
	data.S.Title = "About -- " + data.S.Title
	data.S.About = true
	h.template.load(h, "about", data)
}

func helpHandler(h handler) {
	var data statusData
	data.S = GetStatus(h)
	data.S.Title = "Help -- " + data.S.Title
	data.S.Help = true
	h.template.load(h, "help", data)
}

func logoutHandler(h handler) {
	h.sess.LogOut()
	h.sess.Notify("Logged out!", "Bye bye "+h.sess.User, "success")
	h.sess.Save(h.w, h.r)
	log.Info("User ", h.sess.User, " logged out")
	http.Redirect(h.w, h.r, "/", http.StatusFound)
}

type bookData struct {
	S                 Status
	Book              database.Book
	Description       []string
	FlaggedBadQuality bool
}

func bookHandler(h handler) {
	id := mux.Vars(h.r)["id"]
	var data bookData
	data.S = GetStatus(h)
	book, err := h.db.GetBookId(id)
	if err != nil {
		notFound(h)
		return
	}
	data.Book = book
	data.S.Title = book.Title + " by " + book.Author[0] + " -- " + data.S.Title
	data.Description = strings.Split(data.Book.Description, "\n")

	// Check whether the current user (or anonymous session) already
	// flagged this book as bad quality.
	data.FlaggedBadQuality = false
	for _, reporter := range book.BadQualityReporters {
		if reporter == h.sess.User || reporter == h.sess.Id() {
			data.FlaggedBadQuality = true
			break
		}
	}
	h.template.load(h, "book", data)
}

func downloadHandler(h handler) {
	id := mux.Vars(h.r)["id"]
	book, err := h.db.GetBookId(id)
	if err != nil {
		notFound(h)
		return
	}
	// Inactive books can only be downloaded by admins.
	if !book.Active && !h.sess.IsAdmin() {
		notFound(h)
		return
	}

	f, err := h.store.Get(book.Id, epubFile)
	if err != nil {
		notFound(h)
		return
	}
	defer f.Close()

	headers := h.w.Header()
	headers["Content-Type"] = []string{"application/epub+zip"}
	headers["Content-Disposition"] = []string{"attachment; filename=\"" + book.Title + ".epub\""}
	io.Copy(h.w, f)
}

func flagHandler(h handler) {
	id := mux.Vars(h.r)["id"]
	// Use the session id as the reporter for anonymous users.
	user := h.sess.Id()
	if h.sess.User != "" {
		user = h.sess.User
	}
	err := h.db.FlagBadQuality(id, user)
	if err != nil {
		log.Warn("An error occurred while flagging ", id, ": ", err)
	}
	h.sess.Notify("Flagged!", "Book marked as bad quality, thank you", "success")
	h.sess.Save(h.w, h.r)
	http.Redirect(h.w, h.r, h.r.Referer(), http.StatusFound)
}

type indexData struct {
	S               Status
	Books           []database.Book
	VisitedBooks    []database.Book
	DownloadedBooks []database.Book
	Count           int
	Tags            []string
	News            []newsEntry
}

func indexHandler(h handler) {
	var data indexData
	data.Tags, _ = h.db.GetTags()
	data.S = GetStatus(h)
	data.S.Home = true
	data.Books, data.Count, _ = h.db.GetBooks("", booksFrontPage, 0)
	data.VisitedBooks, _ = h.db.GetVisitedBooks()
	data.DownloadedBooks, _ = h.db.GetDownloadedBooks()
	data.News = getNews(1, daysNewsIndexpage, h.db)
	h.template.load(h, "index", data)
}

func notFound(h handler) {
	var data statusData
	data.S = GetStatus(h)
	data.S.Title = "Not found -- " + data.S.Title
	h.w.WriteHeader(http.StatusNotFound)
	h.template.load(h, "404", data)
}

// UpdateLogger reloads the seelog configuration from the given file and
// replaces the current logger with it.
func UpdateLogger(loggerConfig string) error {
	logger, err := log.LoggerFromConfigAsFile(loggerConfig)
	if err != nil {
		return err
	}
	return log.ReplaceLogger(logger)
}

// InitRouter registers all HTTP routes on the default mux.
func InitRouter(db *database.DB, sg *StatsGatherer, assetsPath string) {
	const idPattern = "[0-9a-zA-Z\\-\\_]{16}"

	r := mux.NewRouter()
	var notFoundHandler http.HandlerFunc = sg.Gather(notFound)
	r.NotFoundHandler = notFoundHandler

	r.HandleFunc("/", sg.Gather(indexHandler))
	for _, file := range
[]string{"robots.txt", "description.json", "opensearch.xml", "key.asc"} { serveFunc := func(w http.ResponseWriter, r *http.Request) { http.ServeFile(w, r, path.Join(assetsPath, file)) } r.HandleFunc("/"+file, serveFunc) } for _, folder := range []string{"img", "css", "js"} { r.HandleFunc("/"+folder+"/{"+folder+"}", fileServer(path.Join(assetsPath, folder), "/"+folder+"/")) } r.HandleFunc("/book/{id:"+idPattern+"}", sg.Gather(bookHandler)) r.HandleFunc("/search/", sg.Gather(searchHandler)) r.HandleFunc("/upload/", sg.Gather(uploadHandler)).Methods("GET") r.HandleFunc("/upload/", sg.Gather(uploadPostHandler)).Methods("POST") r.HandleFunc("/read/{id:"+idPattern+"}", sg.Gather(readStartHandler)) r.HandleFunc("/read/{id:"+idPattern+"}/{file:.*}", sg.Gather(readHandler)) r.HandleFunc("/content/{id:"+idPattern+"}/{file:.*}", sg.Gather(contentHandler)) r.HandleFunc("/about/", sg.Gather(aboutHandler)) r.HandleFunc("/help/", sg.Gather(helpHandler)) r.HandleFunc("/download/{id:"+idPattern+"}/{epub:.*}", sg.Gather(downloadHandler)) r.HandleFunc("/cover/{id:"+idPattern+"}/{size}/{img:.*}", sg.Gather(coverHandler)) r.HandleFunc("/stats/", sg.Gather(statsHandler)) r.HandleFunc("/flag/bad_quality/{id:"+idPattern+"}", sg.Gather(flagHandler)) r.HandleFunc("/login/", sg.Gather(loginHandler)).Methods("GET") r.HandleFunc("/login/", sg.Gather(loginPostHandler)).Methods("POST") r.HandleFunc("/create_user/", sg.Gather(createUserHandler)).Methods("POST") r.HandleFunc("/logout/", sg.Gather(logoutHandler)) r.HandleFunc("/dashboard/", sg.Gather(dashboardHandler)) r.HandleFunc("/settings/", sg.Gather(settingsHandler)) r.HandleFunc("/new/", sg.Gather(newHandler)) r.HandleFunc("/save/{id:"+idPattern+"}", sg.Gather(saveHandler)).Methods("POST") r.HandleFunc("/edit/{id:"+idPattern+"}", sg.Gather(editHandler)) r.HandleFunc("/store/{ids:("+idPattern+"/)+}", sg.Gather(storeHandler)) r.HandleFunc("/delete/{ids:("+idPattern+"/)+}", sg.Gather(deleteHandler)) r.HandleFunc("/news/", sg.Gather(newsHandler)) r.HandleFunc("/news/edit", sg.Gather(editNewsHandler)).Methods("GET") r.HandleFunc("/news/edit", sg.Gather(postNewsHandler)).Methods("POST") http.Handle("/", r) } func fileServer(servePath string, prefix string) func(w http.ResponseWriter, r *http.Request) { // FIXME: is there a cleaner way without handler? h := http.FileServer(http.Dir(servePath)) handler := http.StripPrefix(prefix, h) return func(w http.ResponseWriter, r *http.Request) { addCacheControlHeader(w, false) handler.ServeHTTP(w, r) } } func addCacheControlHeader(w http.ResponseWriter, private bool) { // FIXME: cache of download and cover don't depends on user login if private { w.Header().Set("Cache-Control", fmt.Sprintf("max-age=%d, private", cacheMaxAge)) } else { w.Header().Set("Cache-Control", fmt.Sprintf("max-age=%d, public", cacheMaxAge)) } }