Improve memory footprint on upload books

Las Zenow 2017-09-26 23:46:03 +00:00
parent 04b636a62c
commit 548bcd3b70
2 changed files with 51 additions and 49 deletions
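Summary of the change, as read from the diff below: the upload path used to copy each uploaded epub entirely into memory (ioutil.ReadAll into a bytes.Reader) before handing it to epubgo, and parsed the multipart form with a 20,000,000-byte (~20 MB) in-memory limit. After this commit the epub is parsed directly from the multipart.File using the size carried in its *multipart.FileHeader, the in-memory form limit drops to 2 MiB (larger parts spill to temporary files), and those temporary files are cleaned up with MultipartForm.RemoveAll. A minimal sketch of the new validation call follows; the helper name is hypothetical and the epubgo import path is an assumption, since the diff only shows the package name:

package trantor

import (
	"log"
	"mime/multipart"

	"github.com/meskio/epubgo" // import path is an assumption; the diff only shows the epubgo package name
)

// validateEpub is a hypothetical helper showing the pattern the commit adopts:
// parse the epub straight from the multipart.File with the size recorded in the
// *multipart.FileHeader, instead of buffering the whole upload in memory first.
func validateEpub(file multipart.File, header *multipart.FileHeader) bool {
	epub, err := epubgo.Load(file, header.Size)
	if err != nil {
		log.Println("not a valid epub:", header.Filename, err)
		return false
	}
	defer epub.Close()
	return true
}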


@@ -108,35 +108,39 @@ type statsRequest struct {
 func (sg StatsGatherer) worker() {
 	for req := range sg.channel {
-		var err error
-		id := mux.Vars(req.r)["id"]
-		search := strings.Join(req.r.Form["q"], " ")
-		fmt := req.r.FormValue("fmt")
-		pattern := strings.Split(req.r.URL.Path, "/")
-		section := "/"
-		if len(pattern) > 1 && pattern[1] != "" {
-			section = pattern[1]
-		}
-		sg.instrument.Request(instrument.RequestData{
-			Section:  section,
-			ID:       id,
-			Search:   search,
-			Fmt:      fmt,
-			Duration: req.duration,
-		})
-		switch section {
-		case "download":
-			err = sg.db.IncDownloads(id)
-		case "book":
-			err = sg.db.IncViews(id)
-		case "read":
-			err = sg.db.IncViews(id)
-		}
-		if err != nil {
-			log.Warn("Problem incrementing visits: ", err)
-		}
+		sg.save(req)
 	}
 }
+
+func (sg StatsGatherer) save(req statsRequest) {
+	var err error
+	id := mux.Vars(req.r)["id"]
+	search := strings.Join(req.r.Form["q"], " ")
+	fmt := req.r.FormValue("fmt")
+	pattern := strings.Split(req.r.URL.Path, "/")
+	section := "/"
+	if len(pattern) > 1 && pattern[1] != "" {
+		section = pattern[1]
+	}
+	sg.instrument.Request(instrument.RequestData{
+		Section:  section,
+		ID:       id,
+		Search:   search,
+		Fmt:      fmt,
+		Duration: req.duration,
+	})
+	switch section {
+	case "download":
+		err = sg.db.IncDownloads(id)
+	case "book":
+		err = sg.db.IncViews(id)
+	case "read":
+		err = sg.db.IncViews(id)
+	}
+	if err != nil {
+		log.Warn("Problem incrementing visits: ", err)
+	}
+}


@@ -3,10 +3,8 @@ package trantor
 import (
 	log "github.com/cihub/seelog"

-	"bytes"
 	"crypto/rand"
 	"encoding/base64"
-	"io/ioutil"
 	"mime/multipart"

 	"github.com/gorilla/mux"
@@ -28,25 +26,24 @@ func InitUpload(database database.DB, store storage.Store) {
 var uploadChannel chan uploadRequest

 type uploadRequest struct {
-	file     multipart.File
-	filename string
-	id       int
+	file   multipart.File
+	header *multipart.FileHeader
+	id     int
 }

 func uploadWorker(database database.DB, store storage.Store) {
 	for req := range uploadChannel {
-		processFile(req, database, store)
+		req.processFile(database, store)
 	}
 }

-func processFile(req uploadRequest, db database.DB, store storage.Store) {
+func (req uploadRequest) processFile(db database.DB, store storage.Store) {
 	defer req.file.Close()
-	epub, err := openMultipartEpub(req.file)
+	epub, err := epubgo.Load(req.file, req.header.Size)
 	if err != nil {
-		log.Warn("Not valid epub uploaded file ", req.filename, ": ", err)
+		log.Warn("Not valid epub uploaded file ", req.header.Filename, ": ", err)
 		db.UpdateSubmission(req.id, "It is not a valid epub file", nil)
 		return
 	}
 	defer epub.Close()
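
The struct change in the hunk above swaps the plain filename for the whole *multipart.FileHeader. A brief hedged sketch (the describe helper is hypothetical, not from the commit): the header already exposes Filename and Size (the Size field exists since Go 1.9), which is exactly what the streaming epubgo.Load call and the log messages need.

package trantor

import (
	"fmt"
	"mime/multipart"
)

// uploadRequestSketch mirrors the new uploadRequest struct from the diff; the
// describe helper only illustrates that the header already carries the name
// and size, so nothing has to be buffered just to learn them.
type uploadRequestSketch struct {
	file   multipart.File
	header *multipart.FileHeader
	id     int
}

func (req uploadRequestSketch) describe() string {
	return fmt.Sprintf("%s (%d bytes, submission %d)", req.header.Filename, req.header.Size, req.id)
}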
@@ -70,18 +67,25 @@ func processFile(req uploadRequest, db database.DB, store storage.Store) {
 		db.UpdateSubmission(req.id, "There was a problem in our server", nil)
 		return
 	}
-	log.Info("File uploaded: ", req.filename)
+	log.Info("File uploaded: ", req.header.Filename)
 	db.UpdateSubmission(req.id, "Waiting for moderation", &book)
 }

 func uploadPostHandler(h handler) {
+	const _2M int64 = (1 << 20) * 2
+
 	if h.ro {
 		h.sess.Notify("Upload failed!", "The library is in Read Only mode, no books can be uploaded", "error")
 		uploadHandler(h)
 		return
 	}

-	h.r.ParseMultipartForm(20000000)
+	if err := h.r.ParseMultipartForm(_2M); nil != err {
+		log.Error("Can't parse form: ", err)
+		return
+	}
+	defer h.r.MultipartForm.RemoveAll()
+
 	filesForm := h.r.MultipartForm.File["epub"]
 	submissionID := genID()
 	for _, f := range filesForm {
@@ -104,7 +108,7 @@ func uploadPostHandler(h handler) {
 		if err != nil {
 			log.Error("Can add submission to db for ", f.Filename, ": ", err)
 		}
-		uploadChannel <- uploadRequest{file, f.Filename, id}
+		uploadChannel <- uploadRequest{file, f, id}
 	}
 	_uploadHandler(h, submissionID)
 }
@@ -146,12 +150,6 @@ type submissionData struct {
 	Submissions []database.Submission
 }

-func openMultipartEpub(file multipart.File) (*epubgo.Epub, error) {
-	buff, _ := ioutil.ReadAll(file)
-	reader := bytes.NewReader(buff)
-	return epubgo.Load(reader, int64(len(buff)))
-}
-
 func genID() string {
 	b := make([]byte, 12)
 	rand.Read(b)