Dirty hack to reload covers from new books

This commit is contained in:
Las Zenow 2013-07-18 22:08:35 +02:00
parent c01c465330
commit 4b536352de
9 changed files with 985 additions and 1 deletions

1
.gitignore vendored
View file

@ -6,5 +6,6 @@ tools/adduser/adduser
tools/update/update
tools/togridfs/togridfs
tools/getISBNnDesc/getISBNnDesc
tools/coverNew/coverNew
tags
.*.swp

View file

@ -1,4 +1,4 @@
Some tools dirty to manage trantor:
Some dirty tools to manage trantor:
- adduser. Used to add users to trantor:
$ adduser myNick
@ -9,3 +9,5 @@ Password:
- togridfs (23/4/2013). Migrate all files and covers to gridfs
- getISBNnDesc (31/5/2013). Import the ISBN and the description with changes of lines to the database
- coverNew. Reload the cover from all the new books

45
tools/coverNew/config.go Normal file
View file

@ -0,0 +1,45 @@
package main
// Configuration for the coverNew tool. The values duplicate the main
// trantor application's configuration so the tool reads and writes the
// same database, collections and GridFS prefixes. Only some of them are
// actually used by this tool (DB_*, the collection names, FS_*, IMG_*,
// PASS_SALT, MINUTES_*); the rest are kept for parity with the copied
// sources.
const (
	// Server / database endpoints.
	PORT    = "8080"      // HTTP port (not referenced by this tool)
	DB_IP   = "127.0.0.1" // MongoDB host passed to mgo.Dial
	DB_NAME = "trantor"   // database name

	// Collection names.
	META_COLL           = "meta"       // map-reduce freshness markers (see MR.update)
	BOOKS_COLL          = "books"      // book documents
	TAGS_COLL           = "tags"       // cached tag counts
	VISITED_COLL        = "visited"    // cached per-book visit counts
	DOWNLOADED_COLL     = "downloaded" // cached per-book download counts
	HOURLY_VISITS_COLL  = "visits.hourly"
	DAILY_VISITS_COLL   = "visits.daily"
	MONTHLY_VISITS_COLL = "visits.monthly"
	USERS_COLL          = "users"
	NEWS_COLL           = "news"
	STATS_COLL          = "statistics" // raw stats events fed to the map-reduce jobs

	// GridFS prefixes.
	FS_BOOKS = "fs_books" // epub files
	FS_IMGS  = "fs_imgs"  // cover images

	// Salt prepended to passwords before hashing (see md5Pass).
	PASS_SALT = "ImperialLibSalt"

	// Cache refresh periods, in minutes, for the map-reduce results.
	MINUTES_UPDATE_TAGS       = 11
	MINUTES_UPDATE_VISITED    = 41
	MINUTES_UPDATE_DOWNLOADED = 47
	MINUTES_UPDATE_HOURLY     = 31
	MINUTES_UPDATE_DAILY      = 60*12 + 7
	MINUTES_UPDATE_MONTHLY    = 60*24 + 11

	// Display / pagination limits (not referenced by this tool).
	TAGS_DISPLAY        = 50
	SEARCH_ITEMS_PAGE   = 20
	NEW_ITEMS_PAGE      = 50
	NUM_NEWS            = 10
	DAYS_NEWS_INDEXPAGE = 15

	// Asset paths (not referenced by this tool).
	TEMPLATE_PATH = "templates/"
	CSS_PATH      = "css/"
	JS_PATH       = "js/"
	IMG_PATH      = "img/"

	// Cover image parameters (see storeImg/resizeImg).
	IMG_WIDTH_BIG   = 300 // full-size cover width, px
	IMG_WIDTH_SMALL = 60  // thumbnail width, px
	IMG_QUALITY     = 80  // JPEG encode quality

	CHAN_SIZE = 100 // channel buffer size (not referenced by this tool)
)

204
tools/coverNew/cover.go Normal file
View file

@ -0,0 +1,204 @@
package main
import _ "image/png"
import _ "image/jpeg"
import _ "image/gif"
import (
"bytes"
"git.gitorious.org/go-pkg/epubgo.git"
"github.com/gorilla/mux"
"github.com/nfnt/resize"
"image"
"image/jpeg"
"io"
"io/ioutil"
"labix.org/v2/mgo"
"labix.org/v2/mgo/bson"
"log"
"net/http"
"regexp"
"strings"
)
// coverHandler serves a book's cover image over HTTP.
//
// URL variables (gorilla/mux): "id" is the book's hex ObjectId and
// "size" selects the stored image ("small" for the thumbnail, anything
// else for the full-size cover). Responds via notFound when the id is
// malformed, the book does not exist, the book is inactive and the
// requester has no logged-in session, or the image cannot be opened.
func coverHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	if !bson.IsObjectIdHex(vars["id"]) {
		notFound(w, r)
		return
	}
	id := bson.ObjectIdHex(vars["id"])
	books, _, err := db.GetBooks(bson.M{"_id": id})
	if err != nil || len(books) == 0 {
		notFound(w, r)
		return
	}
	book := books[0]
	if !book.Active {
		// Inactive (not yet approved) books are only visible to
		// logged-in users.
		sess := GetSession(r)
		if sess.User == "" {
			notFound(w, r)
			return
		}
	}

	fs := db.GetFS(FS_IMGS)
	var f *mgo.GridFile
	if vars["size"] == "small" {
		f, err = fs.OpenId(book.CoverSmall)
	} else {
		f, err = fs.OpenId(book.Cover)
	}
	if err != nil {
		log.Println("Error while opening image:", err)
		notFound(w, r)
		return
	}
	defer f.Close()

	// Header().Set is the idiomatic form of the original map write.
	w.Header().Set("Content-Type", "image/jpeg")
	if _, err := io.Copy(w, f); err != nil {
		// Headers are already sent, so logging is all we can do.
		log.Println("Error while sending image:", err)
	}
}
// GetCover extracts a cover image from the epub and stores it in GridFS
// (full size plus thumbnail). It tries, in order: the OPF metadata
// cover entry, a list of common cover file names, and finally the first
// image tag scraped from the spine documents. Returns the GridFS ids
// (big, small), or two empty ids when no cover could be found.
func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
	imgId, smallId := coverFromMetadata(e, title)
	if imgId != "" {
		return imgId, smallId
	}
	imgId, smallId = searchCommonCoverNames(e, title)
	if imgId != "" {
		return imgId, smallId
	}

	/* search for img on the text */
	exp, _ := regexp.Compile("<.*ima?g.*[(src)(href)]=[\"']([^\"']*(\\.[^\\.\"']*))[\"']")
	it, errNext := e.Spine()
	for errNext == nil {
		file, err := it.Open()
		if err != nil {
			break
		}
		txt, err := ioutil.ReadAll(file)
		// Close now: a defer inside the loop (as the original had)
		// would keep every spine file open until GetCover returns.
		file.Close()
		if err != nil {
			break
		}
		res := exp.FindSubmatch(txt)
		if res != nil {
			href := string(res[1])
			urlPart := strings.Split(it.URL(), "/")
			url := strings.Join(urlPart[:len(urlPart)-1], "/")
			// HasPrefix guards the slice: the original href[:3]
			// panics when the matched path is shorter than 3 bytes.
			if strings.HasPrefix(href, "../") {
				href = href[3:]
				url = strings.Join(urlPart[:len(urlPart)-2], "/")
			}
			// Undo the URL escaping commonly found in epub hrefs.
			href = strings.Replace(href, "%20", " ", -1)
			href = strings.Replace(href, "%27", "'", -1)
			href = strings.Replace(href, "%28", "(", -1)
			href = strings.Replace(href, "%29", ")", -1)
			if url == "" {
				url = href
			} else {
				url = url + "/" + href
			}
			img, err := e.OpenFile(url)
			if err == nil {
				defer img.Close()
				return storeImg(img, title)
			}
		}
		errNext = it.Next()
	}
	return "", ""
}
// coverFromMetadata looks for an OPF <meta name="cover"> entry and, if
// it references an openable file, stores that image. Returns the stored
// GridFS ids, or two empty ids when no usable cover metadata exists.
func coverFromMetadata(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
	metaList, _ := e.MetadataAttr("meta")
	for _, attrs := range metaList {
		if attrs["name"] != "cover" {
			continue
		}
		img, err := e.OpenFileId(attrs["content"])
		if err != nil {
			continue
		}
		defer img.Close()
		return storeImg(img, title)
	}
	return "", ""
}
// searchCommonCoverNames tries a fixed list of file names that epubs
// commonly use for their cover and stores the first one that opens.
// Returns two empty ids when none of the candidates exists.
func searchCommonCoverNames(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
	candidates := []string{"cover.jpg", "Images/cover.jpg", "images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"}
	for _, path := range candidates {
		img, err := e.OpenFile(path)
		if err != nil {
			continue
		}
		defer img.Close()
		return storeImg(img, title)
	}
	return "", ""
}
// storeImg saves a cover image to GridFS in two sizes: a big version
// (IMG_WIDTH_BIG px wide) under "<title>.jpg" and a thumbnail
// (IMG_WIDTH_SMALL px wide) under "<title>_small.jpg", both JPEG
// encoded with IMG_QUALITY. Returns the GridFS ids (big, small), or two
// empty ids on any failure (which is logged).
func storeImg(img io.Reader, title string) (bson.ObjectId, bson.ObjectId) {
	/* open the files */
	fBig, err := createCoverFile(title)
	if err != nil {
		log.Println("Error creating", title, ":", err.Error())
		return "", ""
	}
	defer fBig.Close()
	fSmall, err := createCoverFile(title + "_small")
	if err != nil {
		log.Println("Error creating", title+"_small", ":", err.Error())
		return "", ""
	}
	defer fSmall.Close()

	/* resize img */
	// The source reader can only be consumed once: everything read for
	// the big resize is teed into img2 for the small resize.
	var img2 bytes.Buffer
	img1 := io.TeeReader(img, &img2)
	// Named field instead of the positional jpeg.Options{IMG_QUALITY}
	// literal, which go vet's composite check flags as fragile.
	jpgOptions := jpeg.Options{Quality: IMG_QUALITY}
	imgResized, err := resizeImg(img1, IMG_WIDTH_BIG)
	if err != nil {
		log.Println("Error resizing big image:", err.Error())
		return "", ""
	}
	err = jpeg.Encode(fBig, imgResized, &jpgOptions)
	if err != nil {
		log.Println("Error encoding big image:", err.Error())
		return "", ""
	}
	imgSmallResized, err := resizeImg(&img2, IMG_WIDTH_SMALL)
	if err != nil {
		log.Println("Error resizing small image:", err.Error())
		return "", ""
	}
	err = jpeg.Encode(fSmall, imgSmallResized, &jpgOptions)
	if err != nil {
		log.Println("Error encoding small image:", err.Error())
		return "", ""
	}
	idBig, _ := fBig.Id().(bson.ObjectId)
	idSmall, _ := fSmall.Id().(bson.ObjectId)
	return idBig, idSmall
}
// createCoverFile opens a new GridFS file named "<title>.jpg" in the
// cover-image filesystem. The caller is responsible for closing it.
func createCoverFile(title string) (*mgo.GridFile, error) {
	return db.GetFS(FS_IMGS).Create(title + ".jpg")
}
// resizeImg decodes the image from imgReader (png/jpeg/gif, registered
// via the blank imports above) and scales it to the given width.
// Passing height 0 to resize.Resize preserves the aspect ratio.
func resizeImg(imgReader io.Reader, width uint) (image.Image, error) {
	decoded, _, err := image.Decode(imgReader)
	if err != nil {
		return nil, err
	}
	resized := resize.Resize(width, 0, decoded, resize.NearestNeighbor)
	return resized, nil
}

View file

@ -0,0 +1,32 @@
package main
import (
"fmt"
"labix.org/v2/mgo/bson"
"net/http"
)
// main reloads the covers of every inactive ("new") book: it opens each
// epub from GridFS, extracts a cover, and writes the resulting GridFS
// ids back onto the book document.
func main() {
	db = initDB()
	defer db.Close()

	books, _, _ := db.GetNewBooks()
	for _, book := range books {
		fmt.Println(book.Title)
		fmt.Println(book.File)
		e, err := OpenBook(book.File)
		if err != nil {
			fmt.Println("================", err)
			// BUG FIX: on error e is nil; the original fell through
			// and handed the nil epub to GetCover, which dereferences
			// it. Skip this book instead.
			continue
		}
		cover, coverSmall := GetCover(e, book.Title)
		if cover != "" {
			db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"cover": cover, "coversmall": coverSmall})
		}
		e.Close()
	}
}
// notFound is a do-nothing stub: cover.go (copied from the main app)
// calls it, so it must exist for this tool to compile. It deliberately
// writes nothing to the response.
func notFound(w http.ResponseWriter, r *http.Request) {
	// cover.go needs this function to compile
}

230
tools/coverNew/database.go Normal file
View file

@ -0,0 +1,230 @@
package main
import (
"crypto/md5"
"labix.org/v2/mgo"
"labix.org/v2/mgo/bson"
"time"
)
// db is the package-wide database handle; main assigns it (via initDB)
// before any other code runs.
var db *DB

// Book mirrors a document of the books collection. Most string fields
// hold epub (Dublin Core) metadata.
type Book struct {
	Id          string `bson:"_id"` // hex string form; filled in from the raw ObjectId by GetBooks
	Title       string
	Author      []string
	Contributor string
	Publisher   string
	Description string
	Subject     []string // tags; fed to the tags map-reduce
	Date        string
	Lang        []string
	Isbn        string
	Type        string
	Format      string
	Source      string
	Relation    string
	Coverage    string
	Rights      string
	Meta        string
	File        bson.ObjectId // GridFS id of the epub (FS_BOOKS)
	Cover       bson.ObjectId // GridFS id of the full-size cover (FS_IMGS)
	CoverSmall  bson.ObjectId // GridFS id of the cover thumbnail (FS_IMGS)
	Active      bool          // false for new, not yet approved books
	Keywords    []string      // precomputed search keywords
}

// News is a single news entry.
type News struct {
	Date time.Time
	Text string
}

// DB bundles the mgo session with handles to the collections this tool
// touches, plus the map-reduce helper.
type DB struct {
	session *mgo.Session
	books   *mgo.Collection
	user    *mgo.Collection
	news    *mgo.Collection
	stats   *mgo.Collection
	mr      *MR
}
// initDB dials MongoDB at DB_IP and wires up the collection handles and
// the map-reduce helper. It panics when the server cannot be reached,
// which is acceptable for this one-shot tool.
func initDB() *DB {
	session, err := mgo.Dial(DB_IP)
	if err != nil {
		panic(err)
	}
	database := session.DB(DB_NAME)
	return &DB{
		session: session,
		books:   database.C(BOOKS_COLL),
		user:    database.C(USERS_COLL),
		news:    database.C(NEWS_COLL),
		stats:   database.C(STATS_COLL),
		mr:      NewMR(database),
	}
}
// Close shuts down the underlying mgo session; the DB must not be used
// afterwards.
func (d *DB) Close() {
	d.session.Close()
}

// md5Pass derives the stored password hash from a plain-text password.
//
// NOTE(review): hash.Hash.Sum(b) APPENDS the digest of the data written
// to the hash (here: nothing) to b — it does not hash b. So the result
// is the raw salt+password bytes followed by the MD5 of the empty
// string. This is almost certainly not what was intended, but it must
// match what the main trantor application stores: "fixing" it here
// would invalidate every stored password. Confirm against the main app
// before changing.
func md5Pass(pass string) []byte {
	h := md5.New()
	hash := h.Sum(([]byte)(PASS_SALT + pass))
	return hash
}
// SetPassword replaces the stored password hash of the given user.
func (d *DB) SetPassword(user string, pass string) error {
	update := bson.M{"$set": bson.M{"pass": md5Pass(pass)}}
	return d.user.Update(bson.M{"user": user}, update)
}

// UserValid reports whether a user with this name and password exists.
// Database errors are treated as "not valid".
func (d *DB) UserValid(user string, pass string) bool {
	query := bson.M{"user": user, "pass": md5Pass(pass)}
	n, err := d.user.Find(query).Count()
	return err == nil && n != 0
}
// AddNews stores a news entry stamped with the current time.
func (d *DB) AddNews(text string) error {
	return d.news.Insert(News{Date: time.Now(), Text: text})
}

// GetNews returns up to num news entries, newest first. When days is
// non-zero only entries from the last `days` days are considered.
func (d *DB) GetNews(num int, days int) (news []News, err error) {
	query := bson.M{}
	if days != 0 {
		cutoff := time.Now().Add(time.Duration(-24*days) * time.Hour)
		query = bson.M{"date": bson.M{"$gt": cutoff}}
	}
	err = d.news.Find(query).Sort("-date").Limit(num).All(&news)
	return
}
// InsertStats records one raw statistics document.
func (d *DB) InsertStats(stats interface{}) error {
	return d.stats.Insert(stats)
}

// InsertBook stores a new book document.
func (d *DB) InsertBook(book interface{}) error {
	return d.books.Insert(book)
}

// RemoveBook deletes the book document with the given id (it does not
// touch the book's GridFS files; see DeleteBook for that).
func (d *DB) RemoveBook(id bson.ObjectId) error {
	return d.books.Remove(bson.M{"_id": id})
}

// UpdateBook applies data as a $set update to the book with this id.
func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
	return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
}
/* optional parameters: length and start index
 *
 * Returns: list of books, number found and err
 */
// GetBooks runs query against the books collection, newest first.
// num is the total match count before pagination; the returned books'
// Id fields are converted to their hex string form.
func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
	// Optional variadic parameters: r[0] = page length, r[1] = start index.
	var start, length int
	if len(r) > 0 {
		length = r[0]
	}
	if len(r) > 1 {
		start = r[1]
	}

	q := d.books.Find(query).Sort("-_id")
	if num, err = q.Count(); err != nil {
		return
	}
	if start != 0 {
		q = q.Skip(start)
	}
	if length != 0 {
		q = q.Limit(length)
	}
	err = q.All(&books)

	// Expose the raw ObjectId as its hex string.
	for i := range books {
		books[i].Id = bson.ObjectId(books[i].Id).Hex()
	}
	return
}
/* Get the most visited books
 */
// GetVisitedBooks returns up to num books, most visited first, using
// the cached map-reduce visit counts.
func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
	bookId, err := d.mr.GetMostVisited(num, d.stats)
	if err != nil {
		return nil, err
	}
	// BUG FIX: size by the ids actually returned. The map-reduce can
	// yield fewer than num ids, and the original make([]Book, num)
	// padded the tail with zero-value Book entries.
	books = make([]Book, len(bookId))
	for i, id := range bookId {
		// A failed lookup leaves a zero-value Book, as before.
		d.books.Find(bson.M{"_id": id}).One(&books[i])
		books[i].Id = bson.ObjectId(books[i].Id).Hex()
	}
	return
}
/* Get the most downloaded books
 */
// GetDownloadedBooks returns up to num books, most downloaded first,
// using the cached map-reduce download counts.
func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
	bookId, err := d.mr.GetMostDownloaded(num, d.stats)
	if err != nil {
		return nil, err
	}
	// BUG FIX: size by the ids actually returned. The map-reduce can
	// yield fewer than num ids, and the original make([]Book, num)
	// padded the tail with zero-value Book entries.
	books = make([]Book, len(bookId))
	for i, id := range bookId {
		// A failed lookup leaves a zero-value Book, as before.
		d.books.Find(bson.M{"_id": id}).One(&books[i])
		books[i].Id = bson.ObjectId(books[i].Id).Hex()
	}
	return
}
/* optional parameters: length and start index
 *
 * Returns: list of books, number found and err
 */
// GetNewBooks lists the books that are not yet active (newly uploaded,
// pending review), delegating pagination to GetBooks.
func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
	notActive := bson.M{"$nor": []bson.M{{"active": true}}}
	return d.GetBooks(notActive, r...)
}

// BookActive reports whether the book with the given id exists and is
// marked active. Lookup errors count as "not active".
func (d *DB) BookActive(id bson.ObjectId) bool {
	var b Book
	if err := d.books.Find(bson.M{"_id": id}).One(&b); err != nil {
		return false
	}
	return b.Active
}

// GetFS returns the GridFS with the given prefix (FS_BOOKS or FS_IMGS).
func (d *DB) GetFS(prefix string) *mgo.GridFS {
	return d.session.DB(DB_NAME).GridFS(prefix)
}
// GetTags returns the numTags most used subject tags, delegating to the
// map-reduce cache.
func (d *DB) GetTags(numTags int) ([]string, error) {
	return d.mr.GetTags(numTags, d.books)
}

// Visits is one point of a visits-per-period series: Date is the period
// start (milliseconds since epoch, as produced by Date.UTC in the
// map-reduce jobs) and Count the number of visitor sessions.
type Visits struct {
	Date int64 "_id"
	Count int "value"
}

// GetHourVisits returns the cached per-hour visit series (start bounds
// the recompute window; see MR.GetHourVisits).
func (d *DB) GetHourVisits(start time.Time) ([]Visits, error) {
	return d.mr.GetHourVisits(start, d.stats)
}

// GetDayVisits returns the cached per-day visit series.
func (d *DB) GetDayVisits(start time.Time) ([]Visits, error) {
	return d.mr.GetDayVisits(start, d.stats)
}

// GetMonthVisits returns the cached per-month visit series.
func (d *DB) GetMonthVisits(start time.Time) ([]Visits, error) {
	return d.mr.GetMonthVisits(start, d.stats)
}

266
tools/coverNew/mapreduce.go Normal file
View file

@ -0,0 +1,266 @@
package main
import (
"labix.org/v2/mgo"
"labix.org/v2/mgo/bson"
"time"
)
// MR caches expensive aggregate queries (tags, most visited/downloaded,
// visit time series) as MongoDB map-reduce output collections, each
// refreshed lazily when its freshness marker in META_COLL expires.
type MR struct {
	meta        *mgo.Collection // freshness markers, one doc per cached collection
	tags        *mgo.Collection
	visited     *mgo.Collection
	downloaded  *mgo.Collection
	hourly_raw  *mgo.Collection // stage-1 output: (hour, session) pairs
	daily_raw   *mgo.Collection // stage-1 output: (day, session) pairs
	monthly_raw *mgo.Collection // stage-1 output: (month, session) pairs
	hourly      *mgo.Collection // stage-2 output: sessions per hour
	daily       *mgo.Collection // stage-2 output: sessions per day
	monthly     *mgo.Collection // stage-2 output: sessions per month
}
// NewMR builds an MR with handles to every cache collection; the "_raw"
// collections hold the intermediate output of the two-stage visit jobs.
func NewMR(database *mgo.Database) *MR {
	return &MR{
		meta:        database.C(META_COLL),
		tags:        database.C(TAGS_COLL),
		visited:     database.C(VISITED_COLL),
		downloaded:  database.C(DOWNLOADED_COLL),
		hourly_raw:  database.C(HOURLY_VISITS_COLL + "_raw"),
		daily_raw:   database.C(DAILY_VISITS_COLL + "_raw"),
		monthly_raw: database.C(MONTHLY_VISITS_COLL + "_raw"),
		hourly:      database.C(HOURLY_VISITS_COLL),
		daily:       database.C(DAILY_VISITS_COLL),
		monthly:     database.C(MONTHLY_VISITS_COLL),
	}
}
// GetTags returns the numTags most frequent subject tags of active
// books, most used first. Counts are cached in TAGS_COLL and only
// recomputed when older than MINUTES_UPDATE_TAGS minutes.
func (m *MR) GetTags(numTags int, booksColl *mgo.Collection) ([]string, error) {
	if m.isOutdated(TAGS_COLL, MINUTES_UPDATE_TAGS) {
		var mr mgo.MapReduce
		// One emit per subject of every active book.
		mr.Map = `function() {
			if (this.subject) {
				this.subject.forEach(function(s) { emit(s, 1); });
			}
		}`
		// NOTE(review): this reduce counts the number of values rather
		// than summing them; if MongoDB re-reduces partial results it
		// will undercount. The visit jobs below sum (count += v)
		// instead — confirm whether this difference is intentional.
		mr.Reduce = `function(tag, vals) {
			var count = 0;
			vals.forEach(function() { count += 1; });
			return count;
		}`
		err := m.update(&mr, bson.M{"active": true}, booksColl, TAGS_COLL)
		if err != nil {
			return nil, err
		}
	}
	var result []struct {
		Tag string "_id"
	}
	err := m.tags.Find(nil).Sort("-value").Limit(numTags).All(&result)
	if err != nil {
		return nil, err
	}
	tags := make([]string, len(result))
	for i, r := range result {
		tags[i] = r.Tag
	}
	return tags, nil
}
// GetMostVisited returns the ids of the num most visited books, most
// visited first, computed from raw stats events with section == "book"
// and cached in VISITED_COLL for MINUTES_UPDATE_VISITED minutes.
func (m *MR) GetMostVisited(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
	if m.isOutdated(VISITED_COLL, MINUTES_UPDATE_VISITED) {
		var mr mgo.MapReduce
		// One emit per visit event, keyed by the visited book id.
		mr.Map = `function() {
			emit(this.id, 1);
		}`
		// NOTE(review): counts values instead of summing them — not
		// safe under MongoDB re-reduce (see note in GetTags).
		mr.Reduce = `function(tag, vals) {
			var count = 0;
			vals.forEach(function() { count += 1; });
			return count;
		}`
		err := m.update(&mr, bson.M{"section": "book"}, statsColl, VISITED_COLL)
		if err != nil {
			return nil, err
		}
	}
	var result []struct {
		Book bson.ObjectId "_id"
	}
	err := m.visited.Find(nil).Sort("-value").Limit(num).All(&result)
	if err != nil {
		return nil, err
	}
	books := make([]bson.ObjectId, len(result))
	for i, r := range result {
		books[i] = r.Book
	}
	return books, nil
}
// GetMostDownloaded returns the ids of the num most downloaded books,
// most downloaded first, computed from raw stats events with
// section == "download" and cached in DOWNLOADED_COLL for
// MINUTES_UPDATE_DOWNLOADED minutes.
func (m *MR) GetMostDownloaded(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
	if m.isOutdated(DOWNLOADED_COLL, MINUTES_UPDATE_DOWNLOADED) {
		var mr mgo.MapReduce
		// One emit per download event, keyed by the book id.
		mr.Map = `function() {
			emit(this.id, 1);
		}`
		// NOTE(review): counts values instead of summing them — not
		// safe under MongoDB re-reduce (see note in GetTags).
		mr.Reduce = `function(tag, vals) {
			var count = 0;
			vals.forEach(function() { count += 1; });
			return count;
		}`
		err := m.update(&mr, bson.M{"section": "download"}, statsColl, DOWNLOADED_COLL)
		if err != nil {
			return nil, err
		}
	}
	var result []struct {
		Book bson.ObjectId "_id"
	}
	err := m.downloaded.Find(nil).Sort("-value").Limit(num).All(&result)
	if err != nil {
		return nil, err
	}
	books := make([]bson.ObjectId, len(result))
	for i, r := range result {
		books[i] = r.Book
	}
	return books, nil
}
// GetHourVisits returns the number of distinct visitor sessions per
// hour. Two chained map-reduce passes are cached: the first groups raw
// stats (from `start` onwards) into (hour, session) keys; the second
// counts sessions per hour into HOURLY_VISITS_COLL. The cache refreshes
// at most every MINUTES_UPDATE_HOURLY minutes; note the final query
// returns the whole cached series, start only bounds the recompute.
func (m *MR) GetHourVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
	if m.isOutdated(HOURLY_VISITS_COLL, MINUTES_UPDATE_HOURLY) {
		// Shared reduce: sums partial counts (safe under re-reduce).
		const reduce = `function(date, vals) {
			var count = 0;
			vals.forEach(function(v) { count += v; });
			return count;
		}`
		// Pass 1: one key per (hour bucket, session).
		var mr mgo.MapReduce
		mr.Map = `function() {
			var date = Date.UTC(this.date.getUTCFullYear(),
				this.date.getUTCMonth(),
				this.date.getUTCDate(),
				this.date.getUTCHours());
			emit({date: date, session: this.session}, 1);
		}`
		mr.Reduce = reduce
		err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, HOURLY_VISITS_COLL+"_raw")
		if err != nil {
			return nil, err
		}
		// Pass 2: count distinct sessions per hour bucket.
		var mr2 mgo.MapReduce
		mr2.Map = `function() {
			emit(this['_id']['date'], 1);
		}`
		mr2.Reduce = reduce
		err = m.update(&mr2, bson.M{}, m.hourly_raw, HOURLY_VISITS_COLL)
		if err != nil {
			return nil, err
		}
	}
	var result []Visits
	err := m.hourly.Find(nil).All(&result)
	return result, err
}
// GetDayVisits returns the number of distinct visitor sessions per day.
// Same two-pass scheme as GetHourVisits, bucketed by calendar day and
// cached for MINUTES_UPDATE_DAILY minutes.
func (m *MR) GetDayVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
	if m.isOutdated(DAILY_VISITS_COLL, MINUTES_UPDATE_DAILY) {
		// Shared reduce: sums partial counts (safe under re-reduce).
		const reduce = `function(date, vals) {
			var count = 0;
			vals.forEach(function(v) { count += v; });
			return count;
		}`
		// Pass 1: one key per (day bucket, session).
		var mr mgo.MapReduce
		mr.Map = `function() {
			var date = Date.UTC(this.date.getUTCFullYear(),
				this.date.getUTCMonth(),
				this.date.getUTCDate());
			emit({date: date, session: this.session}, 1);
		}`
		mr.Reduce = reduce
		err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, DAILY_VISITS_COLL+"_raw")
		if err != nil {
			return nil, err
		}
		// Pass 2: count distinct sessions per day bucket.
		var mr2 mgo.MapReduce
		mr2.Map = `function() {
			emit(this['_id']['date'], 1);
		}`
		mr2.Reduce = reduce
		err = m.update(&mr2, bson.M{}, m.daily_raw, DAILY_VISITS_COLL)
		if err != nil {
			return nil, err
		}
	}
	var result []Visits
	err := m.daily.Find(nil).All(&result)
	return result, err
}
// GetMonthVisits returns the number of distinct visitor sessions per
// month. Same two-pass scheme as GetHourVisits, bucketed by calendar
// month and cached for MINUTES_UPDATE_MONTHLY minutes.
func (m *MR) GetMonthVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
	if m.isOutdated(MONTHLY_VISITS_COLL, MINUTES_UPDATE_MONTHLY) {
		// Shared reduce: sums partial counts (safe under re-reduce).
		const reduce = `function(date, vals) {
			var count = 0;
			vals.forEach(function(v) { count += v; });
			return count;
		}`
		// Pass 1: one key per (month bucket, session).
		var mr mgo.MapReduce
		mr.Map = `function() {
			var date = Date.UTC(this.date.getUTCFullYear(),
				this.date.getUTCMonth());
			emit({date: date, session: this.session}, 1);
		}`
		mr.Reduce = reduce
		err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, MONTHLY_VISITS_COLL+"_raw")
		if err != nil {
			return nil, err
		}
		// Pass 2: count distinct sessions per month bucket.
		var mr2 mgo.MapReduce
		mr2.Map = `function() {
			emit(this['_id']['date'], 1);
		}`
		mr2.Reduce = reduce
		err = m.update(&mr2, bson.M{}, m.monthly_raw, MONTHLY_VISITS_COLL)
		if err != nil {
			return nil, err
		}
	}
	var result []Visits
	err := m.monthly.Find(nil).All(&result)
	return result, err
}
// update re-runs a map-reduce job over queryColl and replaces storeColl
// with its output. The freshness marker in the meta collection is
// removed before the job and re-inserted only after it succeeds, so a
// failed job leaves the collection flagged as outdated and the next
// caller retries (fail-open).
func (m *MR) update(mr *mgo.MapReduce, query bson.M, queryColl *mgo.Collection, storeColl string) error {
	_, err := m.meta.RemoveAll(bson.M{"type": storeColl})
	if err != nil {
		return err
	}
	mr.Out = bson.M{"replace": storeColl}
	_, err = queryColl.Find(query).MapReduce(mr, nil)
	if err != nil {
		return err
	}
	// The new marker's ObjectId embeds the insertion time, which
	// isOutdated uses as the last-update timestamp.
	return m.meta.Insert(bson.M{"type": storeColl})
}
// isOutdated reports whether the cached collection coll needs to be
// recomputed: true when its marker document is missing (or unreadable),
// or when the marker is older than the given number of minutes.
func (m *MR) isOutdated(coll string, minutes float64) bool {
	var marker struct {
		Id bson.ObjectId `bson:"_id"`
	}
	if err := m.meta.Find(bson.M{"type": coll}).One(&marker); err != nil {
		// No marker: treat the cache as stale.
		return true
	}
	// The marker's ObjectId embeds its creation time, i.e. when the
	// last successful update finished (see MR.update).
	age := time.Since(marker.Id.Time())
	return age.Minutes() > minutes
}

76
tools/coverNew/session.go Normal file
View file

@ -0,0 +1,76 @@
package main
import (
"encoding/hex"
"github.com/gorilla/securecookie"
"github.com/gorilla/sessions"
"net/http"
)
// sesStore signs the session cookies. The key is generated at startup,
// so all existing sessions are invalidated when the process restarts.
var sesStore = sessions.NewCookieStore(securecookie.GenerateRandomKey(64))
// Notification is a one-shot (flash) message shown to the user on the
// next page render.
type Notification struct {
	Title string
	Msg string
	Type string /* error, info or success */
}

// Session wraps a gorilla session, exposing the logged-in user name and
// any pending notifications.
type Session struct {
	User string // logged-in user name; "" when anonymous
	Notif []Notification // flash notifications queued by earlier requests
	S *sessions.Session
}
// getNotif drains the three parallel flash queues (message, title,
// type) that Notify fills and rebuilds them as Notification values.
// The queues are normally the same length since Notify pushes to all
// three; if they ever disagree (e.g. a tampered or partially saved
// cookie) the original code panicked with an index out of range —
// missing titles/types now fall back to "".
func getNotif(session *sessions.Session) []Notification {
	msgs := session.Flashes("nMsg")
	titles := session.Flashes("nTitle")
	tpes := session.Flashes("nType")
	notif := make([]Notification, len(msgs))
	for i, m := range msgs {
		msg, _ := m.(string)
		var title, tpe string
		if i < len(titles) {
			title, _ = titles[i].(string)
		}
		if i < len(tpes) {
			tpe, _ = tpes[i].(string)
		}
		notif[i] = Notification{title, msg, tpe}
	}
	return notif
}
// GetSession loads (or starts) the cookie session for this request.
// For an existing session it restores the user name and pending
// notifications; for a brand-new one it assigns a random hex id.
func GetSession(r *http.Request) (s *Session) {
	session, err := sesStore.Get(r, "session")
	s = &Session{S: session}
	if err == nil && !session.IsNew {
		s.User, _ = session.Values["user"].(string)
		s.Notif = getNotif(session)
	}
	if session.IsNew {
		session.Values["id"] = hex.EncodeToString(securecookie.GenerateRandomKey(16))
	}
	return
}
// LogIn marks the session as belonging to user (persisted on Save).
func (s *Session) LogIn(user string) {
	s.User = user
	s.S.Values["user"] = user
}

// LogOut clears the logged-in user. The value is set to "" rather than
// deleted; GetSession treats "" as anonymous.
func (s *Session) LogOut() {
	s.S.Values["user"] = ""
}

// Notify queues a flash notification; the three parallel flash queues
// are read back by getNotif on the next request.
func (s *Session) Notify(title, msg, tpe string) {
	s.S.AddFlash(msg, "nMsg")
	s.S.AddFlash(title, "nTitle")
	s.S.AddFlash(tpe, "nType")
}

// Save writes the session cookie to the response.
// NOTE(review): the error returned by sesStore.Save is discarded.
func (s *Session) Save(w http.ResponseWriter, r *http.Request) {
	sesStore.Save(r, w, s.S)
}

// Id returns the random per-session identifier assigned by GetSession,
// or "" when unset.
func (s *Session) Id() string {
	id, _ := s.S.Values["id"].(string)
	return id
}

128
tools/coverNew/store.go Normal file
View file

@ -0,0 +1,128 @@
package main
import (
"bytes"
"git.gitorious.org/go-pkg/epubgo.git"
"io"
"io/ioutil"
"labix.org/v2/mgo/bson"
"regexp"
"strings"
)
// OpenBook fetches an epub from the books GridFS and parses it. The
// whole file is buffered in memory because epubgo.Load needs a reader
// with a known size while GridFS files are stream-oriented.
func OpenBook(id bson.ObjectId) (*epubgo.Epub, error) {
	fs := db.GetFS(FS_BOOKS)
	f, err := fs.OpenId(id)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	buff, err := ioutil.ReadAll(f)
	if err != nil {
		// BUG FIX: the read error was silently ignored, so a truncated
		// transfer would be parsed as if it were the full epub.
		return nil, err
	}
	reader := bytes.NewReader(buff)
	return epubgo.Load(reader, int64(len(buff)))
}
// StoreNewFile streams file into a new GridFS entry named name and
// returns its ObjectId. The id is returned together with any copy
// error, matching the original behavior.
func StoreNewFile(name string, file io.Reader) (bson.ObjectId, error) {
	fw, err := db.GetFS(FS_BOOKS).Create(name)
	if err != nil {
		return "", err
	}
	defer fw.Close()

	_, copyErr := io.Copy(fw, file)
	id, _ := fw.Id().(bson.ObjectId)
	return id, copyErr
}
// DeleteFile removes an epub from the books GridFS.
func DeleteFile(id bson.ObjectId) error {
	fs := db.GetFS(FS_BOOKS)
	return fs.RemoveId(id)
}

// DeleteCover removes a cover image from the image GridFS.
func DeleteCover(id bson.ObjectId) error {
	fs := db.GetFS(FS_IMGS)
	return fs.RemoveId(id)
}

// DeleteBook removes a book's GridFS files (both covers and the epub).
// It does NOT remove the book document itself, and removal errors are
// ignored (best effort).
func DeleteBook(book Book) {
	if book.Cover != "" {
		DeleteCover(book.Cover)
	}
	if book.CoverSmall != "" {
		DeleteCover(book.CoverSmall)
	}
	DeleteFile(book.File)
}
// cleanStr normalizes a metadata string: decodes the apostrophe entity,
// strips every remaining HTML/XML entity, and trims trailing spaces and
// commas. MustCompile replaces the original Compile calls whose errors
// were silently discarded; both patterns are constant and valid.
func cleanStr(str string) string {
	// Keep apostrophes as text.
	str = strings.Replace(str, "&#39;", "'", -1)
	// Drop any other entity ("&amp;", "&quot;", ...) entirely.
	str = regexp.MustCompile("&[^;]*;").ReplaceAllString(str, "")
	// Trim trailing spaces and commas.
	return regexp.MustCompile("[ ,]*$").ReplaceAllString(str, "")
}
// parseAuthr normalizes the epub dc:creator strings into author names.
// Two common layouts are recognized:
//
//	"Full Name (name-in-parens)" -> the text inside the parentheses
//	"role: Full Name"            -> the text after the colon
//
// Anything else is kept as-is. Every result goes through cleanStr.
// MustCompile replaces the original Compile calls whose errors were
// silently discarded; both patterns are constant and valid.
func parseAuthr(creator []string) []string {
	parens := regexp.MustCompile("^(.*\\( *([^\\)]*) *\\))*$")
	colon := regexp.MustCompile("^[^:]*: *(.*)$")
	res := make([]string, len(creator))
	for i, s := range creator {
		if m := parens.FindStringSubmatch(s); m != nil {
			res[i] = cleanStr(strings.Join(m[2:], ", "))
		} else if m := colon.FindStringSubmatch(s); m != nil {
			res[i] = cleanStr(m[1])
		} else {
			res[i] = cleanStr(s)
		}
	}
	return res
}
// parseDescription joins the epub description strings and strips markup:
// paragraph closes become newlines, remaining tags are removed, a few
// entities are decoded and literal "\n" sequences become real newlines.
// MustCompile replaces the original Compile whose error was discarded.
//
// NOTE(review): cleanStr already removes every "&...;" entity, so the
// &amp;/&lt;/&gt; replacements below can never match — kept for
// fidelity with the original; confirm before simplifying.
func parseDescription(description []string) string {
	str := cleanStr(strings.Join(description, "\n"))
	str = strings.Replace(str, "</p>", "\n", -1)
	str = regexp.MustCompile("<[^>]*>").ReplaceAllString(str, "")
	str = strings.Replace(str, "&amp;", "&", -1)
	str = strings.Replace(str, "&lt;", "<", -1)
	str = strings.Replace(str, "&gt;", ">", -1)
	// Turn literal backslash-n sequences into real newlines.
	str = strings.Replace(str, "\\n", "\n", -1)
	return str
}
// parseSubject flattens the epub subject entries into a tag list,
// splitting each entry on " / " (a common multi-tag separator).
func parseSubject(subject []string) []string {
	var tags []string
	for _, entry := range subject {
		tags = append(tags, strings.Split(entry, " / ")...)
	}
	return tags
}
// parseDate returns the first epub date entry with any
// "Unspecified: " prefix text removed, or "" when there are no dates.
func parseDate(date []string) string {
	if len(date) == 0 {
		return ""
	}
	first := date[0]
	return strings.Replace(first, "Unspecified: ", "", -1)
}
// keywords builds a flat search-keyword list from a book document's
// title, author, publisher and subject fields, splitting the text
// fields on spaces. Missing or mistyped fields degrade to their zero
// values via the checked type assertions.
func keywords(b map[string]interface{}) (k []string) {
	title, _ := b["title"].(string)
	k = strings.Split(title, " ")

	authors, _ := b["author"].([]string)
	for _, author := range authors {
		k = append(k, strings.Split(author, " ")...)
	}

	publisher, _ := b["publisher"].(string)
	k = append(k, strings.Split(publisher, " ")...)

	subjects, _ := b["subject"].([]string)
	k = append(k, subjects...)
	return k
}
}