From 0cbda42c587be1d508b3a6c7c90a026d9d8c6c76 Mon Sep 17 00:00:00 2001
From: Las Zenow
Date: Thu, 28 Aug 2014 02:16:10 -0500
Subject: [PATCH] Remove the tools folder

Everything is moved to its own repo
---
tools/README | 21 ---
tools/addsize/addsize.go | 38 ----
tools/addsize/config.go | 45 -----
tools/addsize/database.go | 243 ------------------------
tools/addsize/mapreduce.go | 266 ---------------------------
tools/adduser/adduser.go | 28 ---
tools/coverNew/config.go | 45 -----
tools/coverNew/cover.go | 204 --------------------
tools/coverNew/coverNew.go | 31 ----
tools/coverNew/database.go | 230 -----------------------
tools/coverNew/mapreduce.go | 266 ---------------------------
tools/coverNew/session.go | 76 --------
tools/coverNew/store.go | 128 -------------
tools/getISBNnDesc/config.go | 32 ----
tools/getISBNnDesc/database.go | 248 -------------------------
tools/getISBNnDesc/get.go | 65 -------
tools/getISBNnDesc/store.go | 128 -------------
tools/importer/config.go | 53 ------
tools/importer/cover.go | 202 --------------------
tools/importer/database.go | 327 ---------------------------------
tools/importer/importer.go | 65 -------
tools/importer/mapreduce.go | 283 ----------------------------
tools/importer/search.go | 85 ---------
tools/importer/session.go | 81 --------
tools/importer/stats.go | 244 ------------------------
tools/importer/store.go | 128 -------------
tools/importer/upload.go | 146 ---------------
tools/keywords/config.go | 40 ----
tools/keywords/keywords.go | 63 -------
tools/store/store.go | 148 ---------------
tools/togridfs/config.go | 32 ----
tools/togridfs/cover.go | 141 --------------
tools/togridfs/database.go | 252 -------------------------
tools/togridfs/togridfs.go | 87 ---------
tools/update/config.go | 22 ---
tools/update/database.go | 214 ---------------------
tools/update/store.go | 265 --------------------------
tools/update/update.go | 27 ---
38 files changed, 4999 deletions(-)
delete mode 100644 tools/README
delete mode 100644 tools/addsize/addsize.go
delete mode 100644 tools/addsize/config.go
delete mode 100644 tools/addsize/database.go
delete mode 100644 tools/addsize/mapreduce.go
delete mode 100644 tools/adduser/adduser.go
delete mode 100644 tools/coverNew/config.go
delete mode 100644 tools/coverNew/cover.go
delete mode 100644 tools/coverNew/coverNew.go
delete mode 100644 tools/coverNew/database.go
delete mode 100644 tools/coverNew/mapreduce.go
delete mode 100644 tools/coverNew/session.go
delete mode 100644 tools/coverNew/store.go
delete mode 100644 tools/getISBNnDesc/config.go
delete mode 100644 tools/getISBNnDesc/database.go
delete mode 100644 tools/getISBNnDesc/get.go
delete mode 100644 tools/getISBNnDesc/store.go
delete mode 100644 tools/importer/config.go
delete mode 100644 tools/importer/cover.go
delete mode 100644 tools/importer/database.go
delete mode 100644 tools/importer/importer.go
delete mode 100644 tools/importer/mapreduce.go
delete mode 100644 tools/importer/search.go
delete mode 100644 tools/importer/session.go
delete mode 100644 tools/importer/stats.go
delete mode 100644 tools/importer/store.go
delete mode 100644 tools/importer/upload.go
delete mode 100644 tools/keywords/config.go
delete mode 100644 tools/keywords/keywords.go
delete mode 100644 tools/store/store.go
delete mode 100644 tools/togridfs/config.go
delete mode 100644 tools/togridfs/cover.go
delete mode 100644 tools/togridfs/database.go
delete mode 100644 tools/togridfs/togridfs.go
delete mode 100644 tools/update/config.go
delete mode 100644 tools/update/database.go
delete mode 100644 tools/update/store.go
delete mode 100644 tools/update/update.go
diff --git a/tools/README b/tools/README
deleted file mode 100644
index 72212df..0000000
--- a/tools/README
+++ /dev/null
@@ -1,21 +0,0 @@
-Some dirty tools to manage trantor:
-
-- adduser. Used to add users to trantor:
-$ adduser myNick
-Password:
-
-- importer. import all epubs passed as parameter into the database and approve them
-
-- update. Update the cover of all the books. It might be outdated.
-
-- togridfs (23/4/2013). Migrate all files and covers to gridfs
-
-- getISBNnDesc (31/5/2013). Import the ISBN and the description with changes of lines to the database
-
-- coverNew. Reload the cover from all the new books
-
-- addsize. Add the size of the books to the book metadata
-
-- keywords. Recalculate keywords
-
-- store. Move files from the database to the local storage
diff --git a/tools/addsize/addsize.go b/tools/addsize/addsize.go
deleted file mode 100644
index 0c8d6a9..0000000
--- a/tools/addsize/addsize.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package main
-
-import (
- "fmt"
- "labix.org/v2/mgo/bson"
-)
-
-func main() {
- db = initDB()
- defer db.Close()
- books, _, _ := db.GetBooks(bson.M{})
-
- for _, book := range books {
- size, err := getSize(book.File)
- if err != nil {
- fmt.Println(err)
- continue
- }
- err = db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"filesize": size})
- if err != nil {
- fmt.Println(err)
- }
- }
-}
-
-type file struct {
- Length int
-}
-
-func getSize(id bson.ObjectId) (int, error) {
- fs := db.GetFS(FS_BOOKS)
- var f file
- err := fs.Find(bson.M{"_id": id}).One(&f)
- if err != nil {
- return 0, err
- }
- return f.Length, nil
-}
diff --git a/tools/addsize/config.go b/tools/addsize/config.go
deleted file mode 100644
index f92f3a3..0000000
--- a/tools/addsize/config.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package main
-
-const (
- PORT = "8080"
-
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- META_COLL = "meta"
- BOOKS_COLL = "books"
- TAGS_COLL = "tags"
- VISITED_COLL = "visited"
- DOWNLOADED_COLL = "downloaded"
- HOURLY_VISITS_COLL = "visits.hourly"
- DAILY_VISITS_COLL = "visits.daily"
- MONTHLY_VISITS_COLL = "visits.monthly"
- USERS_COLL = "users"
- NEWS_COLL = "news"
- STATS_COLL = "statistics"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- PASS_SALT = "ImperialLibSalt"
- MINUTES_UPDATE_TAGS = 11
- MINUTES_UPDATE_VISITED = 41
- MINUTES_UPDATE_DOWNLOADED = 47
- MINUTES_UPDATE_HOURLY = 31
- MINUTES_UPDATE_DAILY = 60*12 + 7
- MINUTES_UPDATE_MONTHLY = 60*24 + 11
- TAGS_DISPLAY = 50
- SEARCH_ITEMS_PAGE = 20
- NEW_ITEMS_PAGE = 50
- NUM_NEWS = 10
- DAYS_NEWS_INDEXPAGE = 15
-
- TEMPLATE_PATH = "templates/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
-
- IMG_WIDTH_BIG = 300
- IMG_WIDTH_SMALL = 60
- IMG_QUALITY = 80
-
- CHAN_SIZE = 100
-)
diff --git a/tools/addsize/database.go b/tools/addsize/database.go
deleted file mode 100644
index e22693f..0000000
--- a/tools/addsize/database.go
+++ /dev/null
@@ -1,243 +0,0 @@
-package main
-
-import (
- "crypto/md5"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "time"
-)
-
-var db *DB
-
-type Book struct {
- Id string `bson:"_id"`
- Title string
- Author []string
- Contributor string
- Publisher string
- Description string
- Subject []string
- Date string
- Lang []string
- Isbn string
- Type string
- Format string
- Source string
- Relation string
- Coverage string
- Rights string
- Meta string
- File bson.ObjectId
- FileSize int
- Cover bson.ObjectId
- CoverSmall bson.ObjectId
- Active bool
- Keywords []string
-}
-
-type News struct {
- Date time.Time
- Text string
-}
-
-type DB struct {
- session *mgo.Session
- books *mgo.Collection
- user *mgo.Collection
- news *mgo.Collection
- stats *mgo.Collection
- mr *MR
-}
-
-func initDB() *DB {
- var err error
- d := new(DB)
- d.session, err = mgo.Dial(DB_IP)
- if err != nil {
- panic(err)
- }
-
- database := d.session.DB(DB_NAME)
- d.books = database.C(BOOKS_COLL)
- d.user = database.C(USERS_COLL)
- d.news = database.C(NEWS_COLL)
- d.stats = database.C(STATS_COLL)
- d.mr = NewMR(database)
- return d
-}
-
-func (d *DB) Close() {
- d.session.Close()
-}
-
-func md5Pass(pass string) []byte {
- h := md5.New()
- hash := h.Sum(([]byte)(PASS_SALT + pass))
- return hash
-}
-
-func (d *DB) SetPassword(user string, pass string) error {
- hash := md5Pass(pass)
- return d.user.Update(bson.M{"user": user}, bson.M{"$set": bson.M{"pass": hash}})
-}
-
-func (d *DB) UserValid(user string, pass string) bool {
- hash := md5Pass(pass)
- n, err := d.user.Find(bson.M{"user": user, "pass": hash}).Count()
- if err != nil {
- return false
- }
- return n != 0
-}
-
-func (d *DB) UserRole(user string) string {
- type result struct {
- Role string
- }
- res := result{}
- err := d.user.Find(bson.M{"user": user}).One(&res)
- if err != nil {
- return ""
- }
- return res.Role
-}
-
-func (d *DB) AddNews(text string) error {
- var news News
- news.Text = text
- news.Date = time.Now()
- return d.news.Insert(news)
-}
-
-func (d *DB) GetNews(num int, days int) (news []News, err error) {
- query := bson.M{}
- if days != 0 {
- duration := time.Duration(-24*days) * time.Hour
- date := time.Now().Add(duration)
- query = bson.M{"date": bson.M{"$gt": date}}
- }
- q := d.news.Find(query).Sort("-date").Limit(num)
- err = q.All(&news)
- return
-}
-
-func (d *DB) InsertStats(stats interface{}) error {
- return d.stats.Insert(stats)
-}
-
-func (d *DB) InsertBook(book interface{}) error {
- return d.books.Insert(book)
-}
-
-func (d *DB) RemoveBook(id bson.ObjectId) error {
- return d.books.Remove(bson.M{"_id": id})
-}
-
-func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
- var start, length int
- if len(r) > 0 {
- length = r[0]
- if len(r) > 1 {
- start = r[1]
- }
- }
- q := d.books.Find(query).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
- if start != 0 {
- q = q.Skip(start)
- }
- if length != 0 {
- q = q.Limit(length)
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most visited books
- */
-func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
- bookId, err := d.mr.GetMostVisited(num, d.stats)
- if err != nil {
- return nil, err
- }
-
- books = make([]Book, num)
- for i, id := range bookId {
- d.books.Find(bson.M{"_id": id}).One(&books[i])
- books[i].Id = bson.ObjectId(books[i].Id).Hex()
- }
- return
-}
-
-/* Get the most downloaded books
- */
-func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
- bookId, err := d.mr.GetMostDownloaded(num, d.stats)
- if err != nil {
- return nil, err
- }
-
- books = make([]Book, num)
- for i, id := range bookId {
- d.books.Find(bson.M{"_id": id}).One(&books[i])
- books[i].Id = bson.ObjectId(books[i].Id).Hex()
- }
- return
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
- return d.GetBooks(bson.M{"$nor": []bson.M{{"active": true}}}, r...)
-}
-
-func (d *DB) BookActive(id bson.ObjectId) bool {
- var book Book
- err := d.books.Find(bson.M{"_id": id}).One(&book)
- if err != nil {
- return false
- }
- return book.Active
-}
-
-func (d *DB) GetFS(prefix string) *mgo.GridFS {
- return d.session.DB(DB_NAME).GridFS(prefix)
-}
-
-func (d *DB) GetTags(numTags int) ([]string, error) {
- return d.mr.GetTags(numTags, d.books)
-}
-
-type Visits struct {
- Date int64 "_id"
- Count int "value"
-}
-
-func (d *DB) GetHourVisits(start time.Time) ([]Visits, error) {
- return d.mr.GetHourVisits(start, d.stats)
-}
-
-func (d *DB) GetDayVisits(start time.Time) ([]Visits, error) {
- return d.mr.GetDayVisits(start, d.stats)
-}
-
-func (d *DB) GetMonthVisits(start time.Time) ([]Visits, error) {
- return d.mr.GetMonthVisits(start, d.stats)
-}
diff --git a/tools/addsize/mapreduce.go b/tools/addsize/mapreduce.go
deleted file mode 100644
index dbadd19..0000000
--- a/tools/addsize/mapreduce.go
+++ /dev/null
@@ -1,266 +0,0 @@
-package main
-
-import (
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "time"
-)
-
-type MR struct {
- meta *mgo.Collection
- tags *mgo.Collection
- visited *mgo.Collection
- downloaded *mgo.Collection
- hourly_raw *mgo.Collection
- daily_raw *mgo.Collection
- monthly_raw *mgo.Collection
- hourly *mgo.Collection
- daily *mgo.Collection
- monthly *mgo.Collection
-}
-
-func NewMR(database *mgo.Database) *MR {
- m := new(MR)
- m.meta = database.C(META_COLL)
- m.tags = database.C(TAGS_COLL)
- m.visited = database.C(VISITED_COLL)
- m.downloaded = database.C(DOWNLOADED_COLL)
- m.hourly_raw = database.C(HOURLY_VISITS_COLL + "_raw")
- m.daily_raw = database.C(DAILY_VISITS_COLL + "_raw")
- m.monthly_raw = database.C(MONTHLY_VISITS_COLL + "_raw")
- m.hourly = database.C(HOURLY_VISITS_COLL)
- m.daily = database.C(DAILY_VISITS_COLL)
- m.monthly = database.C(MONTHLY_VISITS_COLL)
- return m
-}
-
-func (m *MR) GetTags(numTags int, booksColl *mgo.Collection) ([]string, error) {
- if m.isOutdated(TAGS_COLL, MINUTES_UPDATE_TAGS) {
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.subject) {
- this.subject.forEach(function(s) { emit(s, 1); });
- }
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- err := m.update(&mr, bson.M{"active": true}, booksColl, TAGS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Tag string "_id"
- }
- err := m.tags.Find(nil).Sort("-value").Limit(numTags).All(&result)
- if err != nil {
- return nil, err
- }
-
- tags := make([]string, len(result))
- for i, r := range result {
- tags[i] = r.Tag
- }
- return tags, nil
-}
-
-func (m *MR) GetMostVisited(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
- if m.isOutdated(VISITED_COLL, MINUTES_UPDATE_VISITED) {
- var mr mgo.MapReduce
- mr.Map = `function() {
- emit(this.id, 1);
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- err := m.update(&mr, bson.M{"section": "book"}, statsColl, VISITED_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Book bson.ObjectId "_id"
- }
- err := m.visited.Find(nil).Sort("-value").Limit(num).All(&result)
- if err != nil {
- return nil, err
- }
-
- books := make([]bson.ObjectId, len(result))
- for i, r := range result {
- books[i] = r.Book
- }
- return books, nil
-}
-
-func (m *MR) GetMostDownloaded(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
- if m.isOutdated(DOWNLOADED_COLL, MINUTES_UPDATE_DOWNLOADED) {
- var mr mgo.MapReduce
- mr.Map = `function() {
- emit(this.id, 1);
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- err := m.update(&mr, bson.M{"section": "download"}, statsColl, DOWNLOADED_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Book bson.ObjectId "_id"
- }
- err := m.downloaded.Find(nil).Sort("-value").Limit(num).All(&result)
- if err != nil {
- return nil, err
- }
-
- books := make([]bson.ObjectId, len(result))
- for i, r := range result {
- books[i] = r.Book
- }
- return books, nil
-}
-
-func (m *MR) GetHourVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
- if m.isOutdated(HOURLY_VISITS_COLL, MINUTES_UPDATE_HOURLY) {
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate(),
- this.date.getUTCHours());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, HOURLY_VISITS_COLL+"_raw")
- if err != nil {
- return nil, err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- err = m.update(&mr2, bson.M{}, m.hourly_raw, HOURLY_VISITS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []Visits
- err := m.hourly.Find(nil).All(&result)
- return result, err
-}
-
-func (m *MR) GetDayVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
- if m.isOutdated(DAILY_VISITS_COLL, MINUTES_UPDATE_DAILY) {
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, DAILY_VISITS_COLL+"_raw")
- if err != nil {
- return nil, err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- err = m.update(&mr2, bson.M{}, m.daily_raw, DAILY_VISITS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []Visits
- err := m.daily.Find(nil).All(&result)
- return result, err
-}
-
-func (m *MR) GetMonthVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
- if m.isOutdated(MONTHLY_VISITS_COLL, MINUTES_UPDATE_MONTHLY) {
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, MONTHLY_VISITS_COLL+"_raw")
- if err != nil {
- return nil, err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- err = m.update(&mr2, bson.M{}, m.monthly_raw, MONTHLY_VISITS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []Visits
- err := m.monthly.Find(nil).All(&result)
- return result, err
-}
-
-func (m *MR) update(mr *mgo.MapReduce, query bson.M, queryColl *mgo.Collection, storeColl string) error {
- _, err := m.meta.RemoveAll(bson.M{"type": storeColl})
- if err != nil {
- return err
- }
-
- mr.Out = bson.M{"replace": storeColl}
- _, err = queryColl.Find(query).MapReduce(mr, nil)
- if err != nil {
- return err
- }
-
- return m.meta.Insert(bson.M{"type": storeColl})
-}
-
-func (m *MR) isOutdated(coll string, minutes float64) bool {
- var result struct {
- Id bson.ObjectId `bson:"_id"`
- }
- err := m.meta.Find(bson.M{"type": coll}).One(&result)
- if err != nil {
- return true
- }
-
- lastUpdate := result.Id.Time()
- return time.Since(lastUpdate).Minutes() > minutes
-}
diff --git a/tools/adduser/adduser.go b/tools/adduser/adduser.go
deleted file mode 100644
index f1b9d9d..0000000
--- a/tools/adduser/adduser.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package main
-
-import (
- "code.google.com/p/gopass"
- "git.gitorious.org/trantor/trantor.git/database"
- "os"
-)
-
-const (
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
-)
-
-func main() {
- db := database.Init(DB_IP, DB_NAME)
- defer db.Close()
-
- user := os.Args[1]
- pass, err := gopass.GetPass("Password: ")
- if err != nil {
- panic(err)
- }
-
- err = db.AddUser(user, pass)
- if err != nil {
- panic(err)
- }
-}
diff --git a/tools/coverNew/config.go b/tools/coverNew/config.go
deleted file mode 100644
index f92f3a3..0000000
--- a/tools/coverNew/config.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package main
-
-const (
- PORT = "8080"
-
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- META_COLL = "meta"
- BOOKS_COLL = "books"
- TAGS_COLL = "tags"
- VISITED_COLL = "visited"
- DOWNLOADED_COLL = "downloaded"
- HOURLY_VISITS_COLL = "visits.hourly"
- DAILY_VISITS_COLL = "visits.daily"
- MONTHLY_VISITS_COLL = "visits.monthly"
- USERS_COLL = "users"
- NEWS_COLL = "news"
- STATS_COLL = "statistics"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- PASS_SALT = "ImperialLibSalt"
- MINUTES_UPDATE_TAGS = 11
- MINUTES_UPDATE_VISITED = 41
- MINUTES_UPDATE_DOWNLOADED = 47
- MINUTES_UPDATE_HOURLY = 31
- MINUTES_UPDATE_DAILY = 60*12 + 7
- MINUTES_UPDATE_MONTHLY = 60*24 + 11
- TAGS_DISPLAY = 50
- SEARCH_ITEMS_PAGE = 20
- NEW_ITEMS_PAGE = 50
- NUM_NEWS = 10
- DAYS_NEWS_INDEXPAGE = 15
-
- TEMPLATE_PATH = "templates/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
-
- IMG_WIDTH_BIG = 300
- IMG_WIDTH_SMALL = 60
- IMG_QUALITY = 80
-
- CHAN_SIZE = 100
-)
diff --git a/tools/coverNew/cover.go b/tools/coverNew/cover.go
deleted file mode 100644
index 7444567..0000000
--- a/tools/coverNew/cover.go
+++ /dev/null
@@ -1,204 +0,0 @@
-package main
-
-import _ "image/png"
-import _ "image/jpeg"
-import _ "image/gif"
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "github.com/gorilla/mux"
- "github.com/nfnt/resize"
- "image"
- "image/jpeg"
- "io"
- "io/ioutil"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "log"
- "net/http"
- "regexp"
- "strings"
-)
-
-func coverHandler(w http.ResponseWriter, r *http.Request) {
- vars := mux.Vars(r)
- if !bson.IsObjectIdHex(vars["id"]) {
- notFound(w, r)
- return
- }
- id := bson.ObjectIdHex(vars["id"])
- books, _, err := db.GetBooks(bson.M{"_id": id})
- if err != nil || len(books) == 0 {
- notFound(w, r)
- return
- }
- book := books[0]
-
- if !book.Active {
- sess := GetSession(r)
- if sess.User == "" {
- notFound(w, r)
- return
- }
- }
-
- fs := db.GetFS(FS_IMGS)
- var f *mgo.GridFile
- if vars["size"] == "small" {
- f, err = fs.OpenId(book.CoverSmall)
- } else {
- f, err = fs.OpenId(book.Cover)
- }
- if err != nil {
- log.Println("Error while opening image:", err)
- notFound(w, r)
- return
- }
- defer f.Close()
-
- headers := w.Header()
- headers["Content-Type"] = []string{"image/jpeg"}
-
- io.Copy(w, f)
-}
-
-func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
- imgId, smallId := coverFromMetadata(e, title)
- if imgId != "" {
- return imgId, smallId
- }
-
- imgId, smallId = searchCommonCoverNames(e, title)
- if imgId != "" {
- return imgId, smallId
- }
-
- /* search for img on the text */
- exp, _ := regexp.Compile("<.*ima?g.*[(src)(href)]=[\"']([^\"']*(\\.[^\\.\"']*))[\"']")
- it, errNext := e.Spine()
- for errNext == nil {
- file, err := it.Open()
- if err != nil {
- break
- }
- defer file.Close()
-
- txt, err := ioutil.ReadAll(file)
- if err != nil {
- break
- }
- res := exp.FindSubmatch(txt)
- if res != nil {
- href := string(res[1])
- urlPart := strings.Split(it.URL(), "/")
- url := strings.Join(urlPart[:len(urlPart)-1], "/")
- if href[:3] == "../" {
- href = href[3:]
- url = strings.Join(urlPart[:len(urlPart)-2], "/")
- }
- href = strings.Replace(href, "%20", " ", -1)
- href = strings.Replace(href, "%27", "'", -1)
- href = strings.Replace(href, "%28", "(", -1)
- href = strings.Replace(href, "%29", ")", -1)
- if url == "" {
- url = href
- } else {
- url = url + "/" + href
- }
-
- img, err := e.OpenFile(url)
- if err == nil {
- defer img.Close()
- return storeImg(img, title)
- }
- }
- errNext = it.Next()
- }
- return "", ""
-}
-
-func coverFromMetadata(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
- metaList, _ := e.MetadataAttr("meta")
- for _, meta := range metaList {
- if meta["name"] == "cover" {
- img, err := e.OpenFileId(meta["content"])
- if err == nil {
- defer img.Close()
- return storeImg(img, title)
- }
- }
- }
- return "", ""
-}
-
-func searchCommonCoverNames(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
- for _, p := range []string{"cover.jpg", "Images/cover.jpg", "images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"} {
- img, err := e.OpenFile(p)
- if err == nil {
- defer img.Close()
- return storeImg(img, title)
- }
- }
- return "", ""
-}
-
-func storeImg(img io.Reader, title string) (bson.ObjectId, bson.ObjectId) {
- /* open the files */
- fBig, err := createCoverFile(title)
- if err != nil {
- log.Println("Error creating", title, ":", err.Error())
- return "", ""
- }
- defer fBig.Close()
-
- fSmall, err := createCoverFile(title + "_small")
- if err != nil {
- log.Println("Error creating", title+"_small", ":", err.Error())
- return "", ""
- }
- defer fSmall.Close()
-
- /* resize img */
- var img2 bytes.Buffer
- img1 := io.TeeReader(img, &img2)
- jpgOptions := jpeg.Options{IMG_QUALITY}
- imgResized, err := resizeImg(img1, IMG_WIDTH_BIG)
- if err != nil {
- log.Println("Error resizing big image:", err.Error())
- return "", ""
- }
- err = jpeg.Encode(fBig, imgResized, &jpgOptions)
- if err != nil {
- log.Println("Error encoding big image:", err.Error())
- return "", ""
- }
- imgSmallResized, err := resizeImg(&img2, IMG_WIDTH_SMALL)
- if err != nil {
- log.Println("Error resizing small image:", err.Error())
- return "", ""
- }
- err = jpeg.Encode(fSmall, imgSmallResized, &jpgOptions)
- if err != nil {
- log.Println("Error encoding small image:", err.Error())
- return "", ""
- }
-
- idBig, _ := fBig.Id().(bson.ObjectId)
- idSmall, _ := fSmall.Id().(bson.ObjectId)
- return idBig, idSmall
-}
-
-func createCoverFile(title string) (*mgo.GridFile, error) {
- fs := db.GetFS(FS_IMGS)
- return fs.Create(title + ".jpg")
-}
-
-func resizeImg(imgReader io.Reader, width uint) (image.Image, error) {
- img, _, err := image.Decode(imgReader)
- if err != nil {
- return nil, err
- }
-
- return resize.Resize(width, 0, img, resize.NearestNeighbor), nil
-}
diff --git a/tools/coverNew/coverNew.go b/tools/coverNew/coverNew.go
deleted file mode 100644
index 1931d05..0000000
--- a/tools/coverNew/coverNew.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package main
-
-import (
- "fmt"
- "labix.org/v2/mgo/bson"
- "net/http"
-)
-
-func main() {
- db = initDB()
- defer db.Close()
- books, _, _ := db.GetNewBooks()
-
- for _, book := range books {
- fmt.Println(book.Title)
- e, err := OpenBook(book.File)
- if err != nil {
- fmt.Println("================", err)
- }
-
- cover, coverSmall := GetCover(e, book.Title)
- if cover != "" {
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"cover": cover, "coversmall": coverSmall})
- }
- e.Close()
- }
-}
-
-func notFound(w http.ResponseWriter, r *http.Request) {
- // cover.go needs this function to compile
-}
diff --git a/tools/coverNew/database.go b/tools/coverNew/database.go
deleted file mode 100644
index 0dc0eec..0000000
--- a/tools/coverNew/database.go
+++ /dev/null
@@ -1,230 +0,0 @@
-package main
-
-import (
- "crypto/md5"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "time"
-)
-
-var db *DB
-
-type Book struct {
- Id string `bson:"_id"`
- Title string
- Author []string
- Contributor string
- Publisher string
- Description string
- Subject []string
- Date string
- Lang []string
- Isbn string
- Type string
- Format string
- Source string
- Relation string
- Coverage string
- Rights string
- Meta string
- File bson.ObjectId
- Cover bson.ObjectId
- CoverSmall bson.ObjectId
- Active bool
- Keywords []string
-}
-
-type News struct {
- Date time.Time
- Text string
-}
-
-type DB struct {
- session *mgo.Session
- books *mgo.Collection
- user *mgo.Collection
- news *mgo.Collection
- stats *mgo.Collection
- mr *MR
-}
-
-func initDB() *DB {
- var err error
- d := new(DB)
- d.session, err = mgo.Dial(DB_IP)
- if err != nil {
- panic(err)
- }
-
- database := d.session.DB(DB_NAME)
- d.books = database.C(BOOKS_COLL)
- d.user = database.C(USERS_COLL)
- d.news = database.C(NEWS_COLL)
- d.stats = database.C(STATS_COLL)
- d.mr = NewMR(database)
- return d
-}
-
-func (d *DB) Close() {
- d.session.Close()
-}
-
-func md5Pass(pass string) []byte {
- h := md5.New()
- hash := h.Sum(([]byte)(PASS_SALT + pass))
- return hash
-}
-
-func (d *DB) SetPassword(user string, pass string) error {
- hash := md5Pass(pass)
- return d.user.Update(bson.M{"user": user}, bson.M{"$set": bson.M{"pass": hash}})
-}
-
-func (d *DB) UserValid(user string, pass string) bool {
- hash := md5Pass(pass)
- n, err := d.user.Find(bson.M{"user": user, "pass": hash}).Count()
- if err != nil {
- return false
- }
- return n != 0
-}
-
-func (d *DB) AddNews(text string) error {
- var news News
- news.Text = text
- news.Date = time.Now()
- return d.news.Insert(news)
-}
-
-func (d *DB) GetNews(num int, days int) (news []News, err error) {
- query := bson.M{}
- if days != 0 {
- duration := time.Duration(-24*days) * time.Hour
- date := time.Now().Add(duration)
- query = bson.M{"date": bson.M{"$gt": date}}
- }
- q := d.news.Find(query).Sort("-date").Limit(num)
- err = q.All(&news)
- return
-}
-
-func (d *DB) InsertStats(stats interface{}) error {
- return d.stats.Insert(stats)
-}
-
-func (d *DB) InsertBook(book interface{}) error {
- return d.books.Insert(book)
-}
-
-func (d *DB) RemoveBook(id bson.ObjectId) error {
- return d.books.Remove(bson.M{"_id": id})
-}
-
-func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
- var start, length int
- if len(r) > 0 {
- length = r[0]
- if len(r) > 1 {
- start = r[1]
- }
- }
- q := d.books.Find(query).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
- if start != 0 {
- q = q.Skip(start)
- }
- if length != 0 {
- q = q.Limit(length)
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most visited books
- */
-func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
- bookId, err := d.mr.GetMostVisited(num, d.stats)
- if err != nil {
- return nil, err
- }
-
- books = make([]Book, num)
- for i, id := range bookId {
- d.books.Find(bson.M{"_id": id}).One(&books[i])
- books[i].Id = bson.ObjectId(books[i].Id).Hex()
- }
- return
-}
-
-/* Get the most downloaded books
- */
-func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
- bookId, err := d.mr.GetMostDownloaded(num, d.stats)
- if err != nil {
- return nil, err
- }
-
- books = make([]Book, num)
- for i, id := range bookId {
- d.books.Find(bson.M{"_id": id}).One(&books[i])
- books[i].Id = bson.ObjectId(books[i].Id).Hex()
- }
- return
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
- return d.GetBooks(bson.M{"$nor": []bson.M{{"active": true}}}, r...)
-}
-
-func (d *DB) BookActive(id bson.ObjectId) bool {
- var book Book
- err := d.books.Find(bson.M{"_id": id}).One(&book)
- if err != nil {
- return false
- }
- return book.Active
-}
-
-func (d *DB) GetFS(prefix string) *mgo.GridFS {
- return d.session.DB(DB_NAME).GridFS(prefix)
-}
-
-func (d *DB) GetTags(numTags int) ([]string, error) {
- return d.mr.GetTags(numTags, d.books)
-}
-
-type Visits struct {
- Date int64 "_id"
- Count int "value"
-}
-
-func (d *DB) GetHourVisits(start time.Time) ([]Visits, error) {
- return d.mr.GetHourVisits(start, d.stats)
-}
-
-func (d *DB) GetDayVisits(start time.Time) ([]Visits, error) {
- return d.mr.GetDayVisits(start, d.stats)
-}
-
-func (d *DB) GetMonthVisits(start time.Time) ([]Visits, error) {
- return d.mr.GetMonthVisits(start, d.stats)
-}
diff --git a/tools/coverNew/mapreduce.go b/tools/coverNew/mapreduce.go
deleted file mode 100644
index dbadd19..0000000
--- a/tools/coverNew/mapreduce.go
+++ /dev/null
@@ -1,266 +0,0 @@
-package main
-
-import (
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "time"
-)
-
-type MR struct {
- meta *mgo.Collection
- tags *mgo.Collection
- visited *mgo.Collection
- downloaded *mgo.Collection
- hourly_raw *mgo.Collection
- daily_raw *mgo.Collection
- monthly_raw *mgo.Collection
- hourly *mgo.Collection
- daily *mgo.Collection
- monthly *mgo.Collection
-}
-
-func NewMR(database *mgo.Database) *MR {
- m := new(MR)
- m.meta = database.C(META_COLL)
- m.tags = database.C(TAGS_COLL)
- m.visited = database.C(VISITED_COLL)
- m.downloaded = database.C(DOWNLOADED_COLL)
- m.hourly_raw = database.C(HOURLY_VISITS_COLL + "_raw")
- m.daily_raw = database.C(DAILY_VISITS_COLL + "_raw")
- m.monthly_raw = database.C(MONTHLY_VISITS_COLL + "_raw")
- m.hourly = database.C(HOURLY_VISITS_COLL)
- m.daily = database.C(DAILY_VISITS_COLL)
- m.monthly = database.C(MONTHLY_VISITS_COLL)
- return m
-}
-
-func (m *MR) GetTags(numTags int, booksColl *mgo.Collection) ([]string, error) {
- if m.isOutdated(TAGS_COLL, MINUTES_UPDATE_TAGS) {
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.subject) {
- this.subject.forEach(function(s) { emit(s, 1); });
- }
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- err := m.update(&mr, bson.M{"active": true}, booksColl, TAGS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Tag string "_id"
- }
- err := m.tags.Find(nil).Sort("-value").Limit(numTags).All(&result)
- if err != nil {
- return nil, err
- }
-
- tags := make([]string, len(result))
- for i, r := range result {
- tags[i] = r.Tag
- }
- return tags, nil
-}
-
-func (m *MR) GetMostVisited(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
- if m.isOutdated(VISITED_COLL, MINUTES_UPDATE_VISITED) {
- var mr mgo.MapReduce
- mr.Map = `function() {
- emit(this.id, 1);
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- err := m.update(&mr, bson.M{"section": "book"}, statsColl, VISITED_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Book bson.ObjectId "_id"
- }
- err := m.visited.Find(nil).Sort("-value").Limit(num).All(&result)
- if err != nil {
- return nil, err
- }
-
- books := make([]bson.ObjectId, len(result))
- for i, r := range result {
- books[i] = r.Book
- }
- return books, nil
-}
-
-func (m *MR) GetMostDownloaded(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
- if m.isOutdated(DOWNLOADED_COLL, MINUTES_UPDATE_DOWNLOADED) {
- var mr mgo.MapReduce
- mr.Map = `function() {
- emit(this.id, 1);
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- err := m.update(&mr, bson.M{"section": "download"}, statsColl, DOWNLOADED_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Book bson.ObjectId "_id"
- }
- err := m.downloaded.Find(nil).Sort("-value").Limit(num).All(&result)
- if err != nil {
- return nil, err
- }
-
- books := make([]bson.ObjectId, len(result))
- for i, r := range result {
- books[i] = r.Book
- }
- return books, nil
-}
-
-func (m *MR) GetHourVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
- if m.isOutdated(HOURLY_VISITS_COLL, MINUTES_UPDATE_HOURLY) {
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate(),
- this.date.getUTCHours());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, HOURLY_VISITS_COLL+"_raw")
- if err != nil {
- return nil, err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- err = m.update(&mr2, bson.M{}, m.hourly_raw, HOURLY_VISITS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []Visits
- err := m.hourly.Find(nil).All(&result)
- return result, err
-}
-
-func (m *MR) GetDayVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
- if m.isOutdated(DAILY_VISITS_COLL, MINUTES_UPDATE_DAILY) {
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, DAILY_VISITS_COLL+"_raw")
- if err != nil {
- return nil, err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- err = m.update(&mr2, bson.M{}, m.daily_raw, DAILY_VISITS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []Visits
- err := m.daily.Find(nil).All(&result)
- return result, err
-}
-
-func (m *MR) GetMonthVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
- if m.isOutdated(MONTHLY_VISITS_COLL, MINUTES_UPDATE_MONTHLY) {
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, MONTHLY_VISITS_COLL+"_raw")
- if err != nil {
- return nil, err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- err = m.update(&mr2, bson.M{}, m.monthly_raw, MONTHLY_VISITS_COLL)
- if err != nil {
- return nil, err
- }
- }
-
- var result []Visits
- err := m.monthly.Find(nil).All(&result)
- return result, err
-}
-
-func (m *MR) update(mr *mgo.MapReduce, query bson.M, queryColl *mgo.Collection, storeColl string) error {
- _, err := m.meta.RemoveAll(bson.M{"type": storeColl})
- if err != nil {
- return err
- }
-
- mr.Out = bson.M{"replace": storeColl}
- _, err = queryColl.Find(query).MapReduce(mr, nil)
- if err != nil {
- return err
- }
-
- return m.meta.Insert(bson.M{"type": storeColl})
-}
-
-func (m *MR) isOutdated(coll string, minutes float64) bool {
- var result struct {
- Id bson.ObjectId `bson:"_id"`
- }
- err := m.meta.Find(bson.M{"type": coll}).One(&result)
- if err != nil {
- return true
- }
-
- lastUpdate := result.Id.Time()
- return time.Since(lastUpdate).Minutes() > minutes
-}
diff --git a/tools/coverNew/session.go b/tools/coverNew/session.go
deleted file mode 100644
index bf861fc..0000000
--- a/tools/coverNew/session.go
+++ /dev/null
@@ -1,76 +0,0 @@
-package main
-
-import (
- "encoding/hex"
- "github.com/gorilla/securecookie"
- "github.com/gorilla/sessions"
- "net/http"
-)
-
-var sesStore = sessions.NewCookieStore(securecookie.GenerateRandomKey(64))
-
-type Notification struct {
- Title string
- Msg string
- Type string /* error, info or success */
-}
-
-type Session struct {
- User string
- Notif []Notification
- S *sessions.Session
-}
-
-func getNotif(session *sessions.Session) []Notification {
- msgs := session.Flashes("nMsg")
- titles := session.Flashes("nTitle")
- tpes := session.Flashes("nType")
- notif := make([]Notification, len(msgs))
- for i, m := range msgs {
- msg, _ := m.(string)
- title, _ := titles[i].(string)
- tpe, _ := tpes[i].(string)
- notif[i] = Notification{title, msg, tpe}
- }
- return notif
-}
-
-func GetSession(r *http.Request) (s *Session) {
- s = new(Session)
- var err error
- s.S, err = sesStore.Get(r, "session")
- if err == nil && !s.S.IsNew {
- s.User, _ = s.S.Values["user"].(string)
- s.Notif = getNotif(s.S)
- }
-
- if s.S.IsNew {
- s.S.Values["id"] = hex.EncodeToString(securecookie.GenerateRandomKey(16))
- }
-
- return
-}
-
-func (s *Session) LogIn(user string) {
- s.User = user
- s.S.Values["user"] = user
-}
-
-func (s *Session) LogOut() {
- s.S.Values["user"] = ""
-}
-
-func (s *Session) Notify(title, msg, tpe string) {
- s.S.AddFlash(msg, "nMsg")
- s.S.AddFlash(title, "nTitle")
- s.S.AddFlash(tpe, "nType")
-}
-
-func (s *Session) Save(w http.ResponseWriter, r *http.Request) {
- sesStore.Save(r, w, s.S)
-}
-
-func (s *Session) Id() string {
- id, _ := s.S.Values["id"].(string)
- return id
-}
diff --git a/tools/coverNew/store.go b/tools/coverNew/store.go
deleted file mode 100644
index e50e65f..0000000
--- a/tools/coverNew/store.go
+++ /dev/null
@@ -1,128 +0,0 @@
-package main
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "io"
- "io/ioutil"
- "labix.org/v2/mgo/bson"
- "regexp"
- "strings"
-)
-
-func OpenBook(id bson.ObjectId) (*epubgo.Epub, error) {
- fs := db.GetFS(FS_BOOKS)
- f, err := fs.OpenId(id)
- if err != nil {
- return nil, err
- }
- defer f.Close()
-
- buff, err := ioutil.ReadAll(f)
- reader := bytes.NewReader(buff)
-
- return epubgo.Load(reader, int64(len(buff)))
-}
-
-func StoreNewFile(name string, file io.Reader) (bson.ObjectId, error) {
- fs := db.GetFS(FS_BOOKS)
- fw, err := fs.Create(name)
- if err != nil {
- return "", err
- }
- defer fw.Close()
-
- _, err = io.Copy(fw, file)
- id, _ := fw.Id().(bson.ObjectId)
- return id, err
-}
-
-func DeleteFile(id bson.ObjectId) error {
- fs := db.GetFS(FS_BOOKS)
- return fs.RemoveId(id)
-}
-
-func DeleteCover(id bson.ObjectId) error {
- fs := db.GetFS(FS_IMGS)
- return fs.RemoveId(id)
-}
-
-func DeleteBook(book Book) {
- if book.Cover != "" {
- DeleteCover(book.Cover)
- }
- if book.CoverSmall != "" {
- DeleteCover(book.CoverSmall)
- }
- DeleteFile(book.File)
-}
-
-func cleanStr(str string) string {
- str = strings.Replace(str, "&#39;", "'", -1)
- exp, _ := regexp.Compile("&[^;]*;")
- str = exp.ReplaceAllString(str, "")
- exp, _ = regexp.Compile("[ ,]*$")
- str = exp.ReplaceAllString(str, "")
- return str
-}
-
-func parseAuthr(creator []string) []string {
- exp1, _ := regexp.Compile("^(.*\\( *([^\\)]*) *\\))*$")
- exp2, _ := regexp.Compile("^[^:]*: *(.*)$")
- res := make([]string, len(creator))
- for i, s := range creator {
- auth := exp1.FindStringSubmatch(s)
- if auth != nil {
- res[i] = cleanStr(strings.Join(auth[2:], ", "))
- } else {
- auth := exp2.FindStringSubmatch(s)
- if auth != nil {
- res[i] = cleanStr(auth[1])
- } else {
- res[i] = cleanStr(s)
- }
- }
- }
- return res
-}
-
-func parseDescription(description []string) string {
- str := cleanStr(strings.Join(description, "\n"))
- str = strings.Replace(str, "&#13;", "\n", -1)
- exp, _ := regexp.Compile("<[^>]*>")
- str = exp.ReplaceAllString(str, "")
- str = strings.Replace(str, "&amp;", "&", -1)
- str = strings.Replace(str, "&lt;", "<", -1)
- str = strings.Replace(str, "&gt;", ">", -1)
- str = strings.Replace(str, "\\n", "\n", -1)
- return str
-}
-
-func parseSubject(subject []string) []string {
- var res []string
- for _, s := range subject {
- res = append(res, strings.Split(s, " / ")...)
- }
- return res
-}
-
-func parseDate(date []string) string {
- if len(date) == 0 {
- return ""
- }
- return strings.Replace(date[0], "Unspecified: ", "", -1)
-}
-
-func keywords(b map[string]interface{}) (k []string) {
- title, _ := b["title"].(string)
- k = strings.Split(title, " ")
- author, _ := b["author"].([]string)
- for _, a := range author {
- k = append(k, strings.Split(a, " ")...)
- }
- publisher, _ := b["publisher"].(string)
- k = append(k, strings.Split(publisher, " ")...)
- subject, _ := b["subject"].([]string)
- k = append(k, subject...)
- return
-}
diff --git a/tools/getISBNnDesc/config.go b/tools/getISBNnDesc/config.go
deleted file mode 100644
index c144607..0000000
--- a/tools/getISBNnDesc/config.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package main
-
-const (
- PORT = "8080"
-
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- META_COLL = "meta"
- BOOKS_COLL = "books"
- TAGS_COLL = "tags"
- USERS_COLL = "users"
- STATS_COLL = "statistics"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- PASS_SALT = "ImperialLibSalt"
- MINUTES_UPDATE_TAGS = 10
- TAGS_DISPLAY = 50
- SEARCH_ITEMS_PAGE = 20
- NEW_ITEMS_PAGE = 50
-
- TEMPLATE_PATH = "templates/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
-
- IMG_WIDTH_BIG = 300
- IMG_WIDTH_SMALL = 60
- IMG_QUALITY = 80
-
- CHAN_SIZE = 100
-)
diff --git a/tools/getISBNnDesc/database.go b/tools/getISBNnDesc/database.go
deleted file mode 100644
index 32003cc..0000000
--- a/tools/getISBNnDesc/database.go
+++ /dev/null
@@ -1,248 +0,0 @@
-package main
-
-import (
- "crypto/md5"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "time"
-)
-
-const (
- META_TYPE_TAGS = "tags updated"
-)
-
-var db *DB
-
-type Book struct {
- Id string `bson:"_id"`
- Title string
- Author []string
- Contributor string
- Publisher string
- Description string
- Subject []string
- Date string
- Lang []string
- Isbn string
- Type string
- Format string
- Source string
- Relation string
- Coverage string
- Rights string
- Meta string
- File bson.ObjectId
- Cover bson.ObjectId
- CoverSmall bson.ObjectId
- Active bool
- Keywords []string
-}
-
-type DB struct {
- session *mgo.Session
- meta *mgo.Collection
- books *mgo.Collection
- tags *mgo.Collection
- user *mgo.Collection
- stats *mgo.Collection
-}
-
-func initDB() *DB {
- var err error
- d := new(DB)
- d.session, err = mgo.Dial(DB_IP)
- if err != nil {
- panic(err)
- }
-
- database := d.session.DB(DB_NAME)
- d.meta = database.C(META_COLL)
- d.books = database.C(BOOKS_COLL)
- d.tags = database.C(TAGS_COLL)
- d.user = database.C(USERS_COLL)
- d.stats = database.C(STATS_COLL)
- return d
-}
-
-func (d *DB) Close() {
- d.session.Close()
-}
-
-func md5Pass(pass string) []byte {
- h := md5.New()
- hash := h.Sum(([]byte)(PASS_SALT + pass))
- return hash
-}
-
-func (d *DB) SetPassword(user string, pass string) error {
- hash := md5Pass(pass)
- return d.user.Update(bson.M{"user": user}, bson.M{"$set": bson.M{"pass": hash}})
-}
-
-func (d *DB) UserValid(user string, pass string) bool {
- hash := md5Pass(pass)
- n, err := d.user.Find(bson.M{"user": user, "pass": hash}).Count()
- if err != nil {
- return false
- }
- return n != 0
-}
-
-func (d *DB) InsertStats(stats interface{}) error {
- return d.stats.Insert(stats)
-}
-
-func (d *DB) InsertBook(book interface{}) error {
- return d.books.Insert(book)
-}
-
-func (d *DB) RemoveBook(id bson.ObjectId) error {
- return d.books.Remove(bson.M{"_id": id})
-}
-
-func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
-}
-
-func (d *DB) IncVisit(id bson.ObjectId) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$inc": bson.M{"VisitsCount": 1}})
-}
-
-func (d *DB) IncDownload(id bson.ObjectId) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$inc": bson.M{"DownloadCount": 1}})
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
- var start, length int
- if len(r) > 0 {
- length = r[0]
- if len(r) > 1 {
- start = r[1]
- }
- }
- q := d.books.Find(query).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
- if start != 0 {
- q = q.Skip(start)
- }
- if length != 0 {
- q = q.Limit(length)
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most visited books
- */
-func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"active": true}).Sort("-VisitsCount").Limit(num)
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most downloaded books
- */
-func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"active": true}).Sort("-DownloadCount").Limit(num)
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
- return d.GetBooks(bson.M{"$nor": []bson.M{{"active": true}}}, r...)
-}
-
-func (d *DB) BookActive(id bson.ObjectId) bool {
- var book Book
- err := d.books.Find(bson.M{"_id": id}).One(&book)
- if err != nil {
- return false
- }
- return book.Active
-}
-
-func (d *DB) GetFS(prefix string) *mgo.GridFS {
- return d.session.DB(DB_NAME).GridFS(prefix)
-}
-
-func (d *DB) areTagsOutdated() bool {
- var result struct {
- Id bson.ObjectId `bson:"_id"`
- }
- err := d.meta.Find(bson.M{"type": META_TYPE_TAGS}).One(&result)
- if err != nil {
- return true
- }
-
- lastUpdate := result.Id.Time()
- return time.Since(lastUpdate).Minutes() > MINUTES_UPDATE_TAGS
-}
-
-func (d *DB) updateTags() error {
- _, err := d.meta.RemoveAll(bson.M{"type": META_TYPE_TAGS})
- if err != nil {
- return err
- }
-
- var mr mgo.MapReduce
- mr.Map = "function() { " +
- "if (this.active) { this.subject.forEach(function(s) { emit(s, 1); }); }" +
- "}"
- mr.Reduce = "function(tag, vals) { " +
- "var count = 0;" +
- "vals.forEach(function() { count += 1; });" +
- "return count;" +
- "}"
- mr.Out = bson.M{"replace": TAGS_COLL}
- _, err = d.books.Find(bson.M{"active": true}).MapReduce(&mr, nil)
- if err != nil {
- return err
- }
-
- return d.meta.Insert(bson.M{"type": META_TYPE_TAGS})
-}
-
-func (d *DB) GetTags(numTags int) ([]string, error) {
- if d.areTagsOutdated() {
- err := d.updateTags()
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Tag string "_id"
- }
- err := d.tags.Find(nil).Sort("-value").Limit(numTags).All(&result)
- if err != nil {
- return nil, err
- }
- tags := make([]string, len(result))
- for i, r := range result {
- tags[i] = r.Tag
- }
- return tags, nil
-}
diff --git a/tools/getISBNnDesc/get.go b/tools/getISBNnDesc/get.go
deleted file mode 100644
index 0875524..0000000
--- a/tools/getISBNnDesc/get.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package main
-
-import (
- "fmt"
- "git.gitorious.org/go-pkg/epubgo.git"
- "labix.org/v2/mgo/bson"
-)
-
-func main() {
- db = initDB()
- defer db.Close()
- books, _, _ := db.GetBooks(bson.M{})
-
- for _, book := range books {
- fmt.Println(book.Title)
- e, err := OpenBook(book.File)
- if err != nil {
- fmt.Println("================", err)
- continue
- }
-
- updateISBN(e, book)
- updateDescription(e, book)
- e.Close()
- }
-}
-
-func updateISBN(e *epubgo.Epub, book Book) {
- attr, err := e.MetadataAttr("identifier")
- if err != nil {
- fmt.Println("isbn ================", err)
- return
- }
- data, err := e.Metadata("identifier")
- if err != nil {
- fmt.Println("isbn ================", err)
- return
- }
- var isbn string
- for i, d := range data {
- if attr[i]["scheme"] == "ISBN" {
- isbn = d
- }
- }
-
- if isbn != "" {
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"isbn": isbn})
- }
-}
-
-func updateDescription(e *epubgo.Epub, book Book) {
- descList, err := e.Metadata("description")
- if err != nil {
- fmt.Println("desc ================", err)
- return
- }
- description := parseDescription(descList)
- if len(description) < 10 {
- return
- }
-
- if len(book.Description) < 10 || book.Description[:10] == description[:10] {
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"description": description})
- }
-}
diff --git a/tools/getISBNnDesc/store.go b/tools/getISBNnDesc/store.go
deleted file mode 100644
index e50e65f..0000000
--- a/tools/getISBNnDesc/store.go
+++ /dev/null
@@ -1,128 +0,0 @@
-package main
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "io"
- "io/ioutil"
- "labix.org/v2/mgo/bson"
- "regexp"
- "strings"
-)
-
-func OpenBook(id bson.ObjectId) (*epubgo.Epub, error) {
- fs := db.GetFS(FS_BOOKS)
- f, err := fs.OpenId(id)
- if err != nil {
- return nil, err
- }
- defer f.Close()
-
- buff, err := ioutil.ReadAll(f)
- reader := bytes.NewReader(buff)
-
- return epubgo.Load(reader, int64(len(buff)))
-}
-
-func StoreNewFile(name string, file io.Reader) (bson.ObjectId, error) {
- fs := db.GetFS(FS_BOOKS)
- fw, err := fs.Create(name)
- if err != nil {
- return "", err
- }
- defer fw.Close()
-
- _, err = io.Copy(fw, file)
- id, _ := fw.Id().(bson.ObjectId)
- return id, err
-}
-
-func DeleteFile(id bson.ObjectId) error {
- fs := db.GetFS(FS_BOOKS)
- return fs.RemoveId(id)
-}
-
-func DeleteCover(id bson.ObjectId) error {
- fs := db.GetFS(FS_IMGS)
- return fs.RemoveId(id)
-}
-
-func DeleteBook(book Book) {
- if book.Cover != "" {
- DeleteCover(book.Cover)
- }
- if book.CoverSmall != "" {
- DeleteCover(book.CoverSmall)
- }
- DeleteFile(book.File)
-}
-
-func cleanStr(str string) string {
- str = strings.Replace(str, "&#39;", "'", -1)
- exp, _ := regexp.Compile("&[^;]*;")
- str = exp.ReplaceAllString(str, "")
- exp, _ = regexp.Compile("[ ,]*$")
- str = exp.ReplaceAllString(str, "")
- return str
-}
-
-func parseAuthr(creator []string) []string {
- exp1, _ := regexp.Compile("^(.*\\( *([^\\)]*) *\\))*$")
- exp2, _ := regexp.Compile("^[^:]*: *(.*)$")
- res := make([]string, len(creator))
- for i, s := range creator {
- auth := exp1.FindStringSubmatch(s)
- if auth != nil {
- res[i] = cleanStr(strings.Join(auth[2:], ", "))
- } else {
- auth := exp2.FindStringSubmatch(s)
- if auth != nil {
- res[i] = cleanStr(auth[1])
- } else {
- res[i] = cleanStr(s)
- }
- }
- }
- return res
-}
-
-func parseDescription(description []string) string {
- str := cleanStr(strings.Join(description, "\n"))
- str = strings.Replace(str, "&#13;", "\n", -1)
- exp, _ := regexp.Compile("<[^>]*>")
- str = exp.ReplaceAllString(str, "")
- str = strings.Replace(str, "&amp;", "&", -1)
- str = strings.Replace(str, "&lt;", "<", -1)
- str = strings.Replace(str, "&gt;", ">", -1)
- str = strings.Replace(str, "\\n", "\n", -1)
- return str
-}
-
-func parseSubject(subject []string) []string {
- var res []string
- for _, s := range subject {
- res = append(res, strings.Split(s, " / ")...)
- }
- return res
-}
-
-func parseDate(date []string) string {
- if len(date) == 0 {
- return ""
- }
- return strings.Replace(date[0], "Unspecified: ", "", -1)
-}
-
-func keywords(b map[string]interface{}) (k []string) {
- title, _ := b["title"].(string)
- k = strings.Split(title, " ")
- author, _ := b["author"].([]string)
- for _, a := range author {
- k = append(k, strings.Split(a, " ")...)
- }
- publisher, _ := b["publisher"].(string)
- k = append(k, strings.Split(publisher, " ")...)
- subject, _ := b["subject"].([]string)
- k = append(k, subject...)
- return
-}
diff --git a/tools/importer/config.go b/tools/importer/config.go
deleted file mode 100644
index 6099708..0000000
--- a/tools/importer/config.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package main
-
-const (
- PORT = "8080"
-
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- META_COLL = "meta"
- BOOKS_COLL = "books"
- TAGS_COLL = "tags"
- VISITED_COLL = "visited"
- DOWNLOADED_COLL = "downloaded"
- HOURLY_VISITS_COLL = "visits.hourly"
- DAILY_VISITS_COLL = "visits.daily"
- MONTHLY_VISITS_COLL = "visits.monthly"
- HOURLY_DOWNLOADS_COLL = "downloads.hourly"
- DAILY_DOWNLOADS_COLL = "downloads.daily"
- MONTHLY_DOWNLOADS_COLL = "downloads.monthly"
- USERS_COLL = "users"
- NEWS_COLL = "news"
- STATS_COLL = "statistics"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- PASS_SALT = "ImperialLibSalt"
- MINUTES_UPDATE_TAGS = 11
- MINUTES_UPDATE_VISITED = 41
- MINUTES_UPDATE_DOWNLOADED = 47
- MINUTES_UPDATE_HOURLY_V = 31
- MINUTES_UPDATE_DAILY_V = 60*12 + 7
- MINUTES_UPDATE_MONTHLY_V = 60*24 + 11
- MINUTES_UPDATE_HOURLY_D = 29
- MINUTES_UPDATE_DAILY_D = 60*12 + 13
- MINUTES_UPDATE_MONTHLY_D = 60*24 + 17
- MINUTES_UPDATE_LOGGER = 5
- TAGS_DISPLAY = 50
- SEARCH_ITEMS_PAGE = 20
- NEW_ITEMS_PAGE = 50
- NUM_NEWS = 10
- DAYS_NEWS_INDEXPAGE = 15
-
- TEMPLATE_PATH = "templates/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
- LOGGER_CONFIG = "logger.xml"
-
- IMG_WIDTH_BIG = 300
- IMG_WIDTH_SMALL = 60
- IMG_QUALITY = 80
-
- CHAN_SIZE = 100
-)
diff --git a/tools/importer/cover.go b/tools/importer/cover.go
deleted file mode 100644
index c6e3bae..0000000
--- a/tools/importer/cover.go
+++ /dev/null
@@ -1,202 +0,0 @@
-package main
-
-import log "github.com/cihub/seelog"
-import _ "image/png"
-import _ "image/jpeg"
-import _ "image/gif"
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "github.com/gorilla/mux"
- "github.com/nfnt/resize"
- "image"
- "image/jpeg"
- "io"
- "io/ioutil"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "regexp"
- "strings"
-)
-
-func coverHandler(h handler) {
- vars := mux.Vars(h.r)
- if !bson.IsObjectIdHex(vars["id"]) {
- notFound(h)
- return
- }
- id := bson.ObjectIdHex(vars["id"])
- books, _, err := h.db.GetBooks(bson.M{"_id": id})
- if err != nil || len(books) == 0 {
- notFound(h)
- return
- }
- book := books[0]
-
- if !book.Active {
- if !h.sess.IsAdmin() {
- notFound(h)
- return
- }
- }
-
- fs := h.db.GetFS(FS_IMGS)
- var f *mgo.GridFile
- if vars["size"] == "small" {
- f, err = fs.OpenId(book.CoverSmall)
- } else {
- f, err = fs.OpenId(book.Cover)
- }
- if err != nil {
- log.Error("Error while opening image: ", err)
- notFound(h)
- return
- }
- defer f.Close()
-
- headers := h.w.Header()
- headers["Content-Type"] = []string{"image/jpeg"}
-
- io.Copy(h.w, f)
-}
-
-func GetCover(e *epubgo.Epub, title string, db *DB) (bson.ObjectId, bson.ObjectId) {
- imgId, smallId := coverFromMetadata(e, title, db)
- if imgId != "" {
- return imgId, smallId
- }
-
- imgId, smallId = searchCommonCoverNames(e, title, db)
- if imgId != "" {
- return imgId, smallId
- }
-
- /* search for img on the text */
- exp, _ := regexp.Compile("<.*ima?g.*[(src)(href)]=[\"']([^\"']*(\\.[^\\.\"']*))[\"']")
- it, errNext := e.Spine()
- for errNext == nil {
- file, err := it.Open()
- if err != nil {
- break
- }
- defer file.Close()
-
- txt, err := ioutil.ReadAll(file)
- if err != nil {
- break
- }
- res := exp.FindSubmatch(txt)
- if res != nil {
- href := string(res[1])
- urlPart := strings.Split(it.URL(), "/")
- url := strings.Join(urlPart[:len(urlPart)-1], "/")
- if href[:3] == "../" {
- href = href[3:]
- url = strings.Join(urlPart[:len(urlPart)-2], "/")
- }
- href = strings.Replace(href, "%20", " ", -1)
- href = strings.Replace(href, "%27", "'", -1)
- href = strings.Replace(href, "%28", "(", -1)
- href = strings.Replace(href, "%29", ")", -1)
- if url == "" {
- url = href
- } else {
- url = url + "/" + href
- }
-
- img, err := e.OpenFile(url)
- if err == nil {
- defer img.Close()
- return storeImg(img, title, db)
- }
- }
- errNext = it.Next()
- }
- return "", ""
-}
-
-func coverFromMetadata(e *epubgo.Epub, title string, db *DB) (bson.ObjectId, bson.ObjectId) {
- metaList, _ := e.MetadataAttr("meta")
- for _, meta := range metaList {
- if meta["name"] == "cover" {
- img, err := e.OpenFileId(meta["content"])
- if err == nil {
- defer img.Close()
- return storeImg(img, title, db)
- }
- }
- }
- return "", ""
-}
-
-func searchCommonCoverNames(e *epubgo.Epub, title string, db *DB) (bson.ObjectId, bson.ObjectId) {
- for _, p := range []string{"cover.jpg", "Images/cover.jpg", "images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"} {
- img, err := e.OpenFile(p)
- if err == nil {
- defer img.Close()
- return storeImg(img, title, db)
- }
- }
- return "", ""
-}
-
-func storeImg(img io.Reader, title string, db *DB) (bson.ObjectId, bson.ObjectId) {
- /* open the files */
- fBig, err := createCoverFile(title, db)
- if err != nil {
- log.Error("Error creating ", title, ": ", err.Error())
- return "", ""
- }
- defer fBig.Close()
-
- fSmall, err := createCoverFile(title+"_small", db)
- if err != nil {
- log.Error("Error creating ", title+"_small", ": ", err.Error())
- return "", ""
- }
- defer fSmall.Close()
-
- /* resize img */
- var img2 bytes.Buffer
- img1 := io.TeeReader(img, &img2)
- jpgOptions := jpeg.Options{IMG_QUALITY}
- imgResized, err := resizeImg(img1, IMG_WIDTH_BIG)
- if err != nil {
- log.Error("Error resizing big image: ", err.Error())
- return "", ""
- }
- err = jpeg.Encode(fBig, imgResized, &jpgOptions)
- if err != nil {
- log.Error("Error encoding big image: ", err.Error())
- return "", ""
- }
- imgSmallResized, err := resizeImg(&img2, IMG_WIDTH_SMALL)
- if err != nil {
- log.Error("Error resizing small image: ", err.Error())
- return "", ""
- }
- err = jpeg.Encode(fSmall, imgSmallResized, &jpgOptions)
- if err != nil {
- log.Error("Error encoding small image: ", err.Error())
- return "", ""
- }
-
- idBig, _ := fBig.Id().(bson.ObjectId)
- idSmall, _ := fSmall.Id().(bson.ObjectId)
- return idBig, idSmall
-}
-
-func createCoverFile(title string, db *DB) (*mgo.GridFile, error) {
- fs := db.GetFS(FS_IMGS)
- return fs.Create(title + ".jpg")
-}
-
-func resizeImg(imgReader io.Reader, width uint) (image.Image, error) {
- img, _, err := image.Decode(imgReader)
- if err != nil {
- return nil, err
- }
-
- return resize.Resize(width, 0, img, resize.NearestNeighbor), nil
-}
diff --git a/tools/importer/database.go b/tools/importer/database.go
deleted file mode 100644
index e49b644..0000000
--- a/tools/importer/database.go
+++ /dev/null
@@ -1,327 +0,0 @@
-package main
-
-import log "github.com/cihub/seelog"
-
-import (
- "crypto/md5"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "os"
- "time"
-)
-
-type Book struct {
- Id string `bson:"_id"`
- Title string
- Author []string
- Contributor string
- Publisher string
- Description string
- Subject []string
- Date string
- Lang []string
- Isbn string
- Type string
- Format string
- Source string
- Relation string
- Coverage string
- Rights string
- Meta string
- File bson.ObjectId
- FileSize int
- Cover bson.ObjectId
- CoverSmall bson.ObjectId
- Active bool
- Keywords []string
-}
-
-type News struct {
- Date time.Time
- Text string
-}
-
-type DB struct {
- session *mgo.Session
-}
-
-func initDB() *DB {
- var err error
- d := new(DB)
- d.session, err = mgo.Dial(DB_IP)
- if err != nil {
- log.Critical(err)
- os.Exit(1)
- }
- return d
-}
-
-func (d *DB) Close() {
- d.session.Close()
-}
-
-func (d *DB) Copy() *DB {
- dbCopy := new(DB)
- dbCopy.session = d.session.Copy()
- return dbCopy
-}
-
-func md5Pass(pass string) []byte {
- h := md5.New()
- hash := h.Sum(([]byte)(PASS_SALT + pass))
- return hash
-}
-
-func (d *DB) SetPassword(user string, pass string) error {
- hash := md5Pass(pass)
- userColl := d.session.DB(DB_NAME).C(USERS_COLL)
- return userColl.Update(bson.M{"user": user}, bson.M{"$set": bson.M{"pass": hash}})
-}
-
-func (d *DB) UserValid(user string, pass string) bool {
- hash := md5Pass(pass)
- userColl := d.session.DB(DB_NAME).C(USERS_COLL)
- n, err := userColl.Find(bson.M{"user": user, "pass": hash}).Count()
- if err != nil {
- return false
- }
- return n != 0
-}
-
-func (d *DB) UserRole(user string) string {
- type result struct {
- Role string
- }
- res := result{}
- userColl := d.session.DB(DB_NAME).C(USERS_COLL)
- err := userColl.Find(bson.M{"user": user}).One(&res)
- if err != nil {
- return ""
- }
- return res.Role
-}
-
-func (d *DB) AddNews(text string) error {
- var news News
- news.Text = text
- news.Date = time.Now()
- newsColl := d.session.DB(DB_NAME).C(NEWS_COLL)
- return newsColl.Insert(news)
-}
-
-func (d *DB) GetNews(num int, days int) (news []News, err error) {
- query := bson.M{}
- if days != 0 {
- duration := time.Duration(-24*days) * time.Hour
- date := time.Now().Add(duration)
- query = bson.M{"date": bson.M{"$gt": date}}
- }
- newsColl := d.session.DB(DB_NAME).C(NEWS_COLL)
- q := newsColl.Find(query).Sort("-date").Limit(num)
- err = q.All(&news)
- return
-}
-
-func (d *DB) InsertStats(stats interface{}) error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- return statsColl.Insert(stats)
-}
-
-func (d *DB) InsertBook(book interface{}) error {
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- return booksColl.Insert(book)
-}
-
-func (d *DB) RemoveBook(id bson.ObjectId) error {
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- return booksColl.Remove(bson.M{"_id": id})
-}
-
-func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- return booksColl.Update(bson.M{"_id": id}, bson.M{"$set": data})
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
- var start, length int
- if len(r) > 0 {
- length = r[0]
- if len(r) > 1 {
- start = r[1]
- }
- }
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- q := booksColl.Find(query).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
- if start != 0 {
- q = q.Skip(start)
- }
- if length != 0 {
- q = q.Limit(length)
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most visited books
- */
-func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
- visitedColl := d.session.DB(DB_NAME).C(VISITED_COLL)
- bookId, err := GetBooksVisited(num, visitedColl)
- if err != nil {
- return nil, err
- }
-
- books = make([]Book, num)
- for i, id := range bookId {
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- booksColl.Find(bson.M{"_id": id}).One(&books[i])
- books[i].Id = bson.ObjectId(books[i].Id).Hex()
- }
- return
-}
-
-func (d *DB) UpdateMostVisited() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateMostVisited(statsColl)
-}
-
-/* Get the most downloaded books
- */
-func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
- downloadedColl := d.session.DB(DB_NAME).C(DOWNLOADED_COLL)
- bookId, err := GetBooksVisited(num, downloadedColl)
- if err != nil {
- return nil, err
- }
-
- books = make([]Book, num)
- for i, id := range bookId {
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- booksColl.Find(bson.M{"_id": id}).One(&books[i])
- books[i].Id = bson.ObjectId(books[i].Id).Hex()
- }
- return
-}
-
-func (d *DB) UpdateDownloadedBooks() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateMostDownloaded(statsColl)
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
- return d.GetBooks(bson.M{"$nor": []bson.M{{"active": true}}}, r...)
-}
-
-func (d *DB) BookActive(id bson.ObjectId) bool {
- var book Book
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- err := booksColl.Find(bson.M{"_id": id}).One(&book)
- if err != nil {
- return false
- }
- return book.Active
-}
-
-func (d *DB) GetFS(prefix string) *mgo.GridFS {
- return d.session.DB(DB_NAME).GridFS(prefix)
-}
-
-func (d *DB) GetTags(numTags int) ([]string, error) {
- tagsColl := d.session.DB(DB_NAME).C(TAGS_COLL)
- return GetTags(numTags, tagsColl)
-}
-
-func (d *DB) UpdateTags() error {
- booksColl := d.session.DB(DB_NAME).C(BOOKS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateTags(booksColl)
-}
-
-type Visits struct {
- Date int64 "_id"
- Count int "value"
-}
-
-func (d *DB) GetHourVisits() ([]Visits, error) {
- hourlyColl := d.session.DB(DB_NAME).C(HOURLY_VISITS_COLL)
- return GetVisits(hourlyColl)
-}
-
-func (d *DB) UpdateHourVisits() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateHourVisits(statsColl)
-}
-
-func (d *DB) GetDayVisits() ([]Visits, error) {
- dailyColl := d.session.DB(DB_NAME).C(DAILY_VISITS_COLL)
- return GetVisits(dailyColl)
-}
-
-func (d *DB) UpdateDayVisits() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateDayVisits(statsColl)
-}
-
-func (d *DB) GetMonthVisits() ([]Visits, error) {
- monthlyColl := d.session.DB(DB_NAME).C(MONTHLY_VISITS_COLL)
- return GetVisits(monthlyColl)
-}
-
-func (d *DB) UpdateMonthVisits() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateMonthVisits(statsColl)
-}
-
-func (d *DB) GetHourDownloads() ([]Visits, error) {
- hourlyColl := d.session.DB(DB_NAME).C(HOURLY_DOWNLOADS_COLL)
- return GetVisits(hourlyColl)
-}
-
-func (d *DB) UpdateHourDownloads() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateHourDownloads(statsColl)
-}
-
-func (d *DB) GetDayDownloads() ([]Visits, error) {
- dailyColl := d.session.DB(DB_NAME).C(DAILY_DOWNLOADS_COLL)
- return GetVisits(dailyColl)
-}
-
-func (d *DB) UpdateDayDownloads() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateDayDownloads(statsColl)
-}
-
-func (d *DB) GetMonthDownloads() ([]Visits, error) {
- monthlyColl := d.session.DB(DB_NAME).C(MONTHLY_DOWNLOADS_COLL)
- return GetVisits(monthlyColl)
-}
-
-func (d *DB) UpdateMonthDownloads() error {
- statsColl := d.session.DB(DB_NAME).C(STATS_COLL)
- mr := NewMR(d.session.DB(DB_NAME))
- return mr.UpdateMonthDownloads(statsColl)
-}
diff --git a/tools/importer/importer.go b/tools/importer/importer.go
deleted file mode 100644
index 293bd0c..0000000
--- a/tools/importer/importer.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package main
-
-import log "github.com/cihub/seelog"
-
-import (
- "git.gitorious.org/go-pkg/epubgo.git"
- "net/http"
- "os"
-)
-
-func main() {
- db := initDB()
- defer db.Close()
-
- for _, file := range os.Args[1:len(os.Args)] {
- uploadEpub(file, db)
- }
-}
-
-func uploadEpub(filename string, db *DB) {
- epub, err := epubgo.Open(filename)
- if err != nil {
- log.Error("Not valid epub '", filename, "': ", err)
- return
- }
- defer epub.Close()
-
- book := parseFile(epub, db)
- title, _ := book["title"].(string)
- _, numTitleFound, _ := db.GetBooks(buildQuery("title:"+title), 1)
- if numTitleFound == 0 {
- book["active"] = true
- }
-
- file, _ := os.Open(filename)
- defer file.Close()
- id, size, err := StoreNewFile(title+".epub", file, db)
- if err != nil {
- log.Error("Error storing book (", title, "): ", err)
- return
- }
-
- book["file"] = id
- book["filesize"] = size
- err = db.InsertBook(book)
- if err != nil {
- log.Error("Error storing metadata (", title, "): ", err)
- return
- }
- log.Info("File uploaded: ", filename)
-}
-
-type Status struct {
- Upload bool
- Stats bool
- Search string
-}
-
-func GetStatus(h handler) Status {
- return Status{}
-}
-
-func loadTemplate(w http.ResponseWriter, tmpl string, data interface{}) {}
-func loadTxtTemplate(w http.ResponseWriter, tmpl string, data interface{}) {}
-func notFound(h handler) {}
diff --git a/tools/importer/mapreduce.go b/tools/importer/mapreduce.go
deleted file mode 100644
index c24deec..0000000
--- a/tools/importer/mapreduce.go
+++ /dev/null
@@ -1,283 +0,0 @@
-package main
-
-import (
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "time"
-)
-
-func GetTags(numTags int, tagsColl *mgo.Collection) ([]string, error) {
- var result []struct {
- Tag string "_id"
- }
- err := tagsColl.Find(nil).Sort("-value").Limit(numTags).All(&result)
- if err != nil {
- return nil, err
- }
-
- tags := make([]string, len(result))
- for i, r := range result {
- tags[i] = r.Tag
- }
- return tags, nil
-}
-
-func GetBooksVisited(num int, visitedColl *mgo.Collection) ([]bson.ObjectId, error) {
- var result []struct {
- Book bson.ObjectId "_id"
- }
- err := visitedColl.Find(nil).Sort("-value").Limit(num).All(&result)
- if err != nil {
- return nil, err
- }
-
- books := make([]bson.ObjectId, len(result))
- for i, r := range result {
- books[i] = r.Book
- }
- return books, nil
-}
-
-func GetVisits(visitsColl *mgo.Collection) ([]Visits, error) {
- var result []Visits
- err := visitsColl.Find(nil).All(&result)
- return result, err
-}
-
-type MR struct {
- database *mgo.Database
-}
-
-func NewMR(database *mgo.Database) *MR {
- m := new(MR)
- m.database = database
- return m
-}
-
-func (m *MR) UpdateTags(booksColl *mgo.Collection) error {
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.subject) {
- this.subject.forEach(function(s) { emit(s, 1); });
- }
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- return m.update(&mr, bson.M{"active": true}, booksColl, TAGS_COLL)
-}
-
-func (m *MR) UpdateMostVisited(statsColl *mgo.Collection) error {
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.id) {
- emit(this.id, 1);
- }
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- return m.update(&mr, bson.M{"section": "book"}, statsColl, VISITED_COLL)
-}
-
-func (m *MR) UpdateMostDownloaded(statsColl *mgo.Collection) error {
- var mr mgo.MapReduce
- mr.Map = `function() {
- emit(this.id, 1);
- }`
- mr.Reduce = `function(tag, vals) {
- var count = 0;
- vals.forEach(function() { count += 1; });
- return count;
- }`
- return m.update(&mr, bson.M{"section": "download"}, statsColl, DOWNLOADED_COLL)
-}
-
-func (m *MR) UpdateHourVisits(statsColl *mgo.Collection) error {
- const numDays = 2
- start := time.Now().UTC().Add(-numDays * 24 * time.Hour)
-
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate(),
- this.date.getUTCHours());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, HOURLY_VISITS_COLL+"_raw")
- if err != nil {
- return err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- hourly_raw := m.database.C(HOURLY_VISITS_COLL + "_raw")
- return m.update(&mr2, bson.M{}, hourly_raw, HOURLY_VISITS_COLL)
-}
-
-func (m *MR) UpdateDayVisits(statsColl *mgo.Collection) error {
- const numDays = 30
- start := time.Now().UTC().Add(-numDays * 24 * time.Hour).Truncate(24 * time.Hour)
-
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, DAILY_VISITS_COLL+"_raw")
- if err != nil {
- return err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- daily_raw := m.database.C(DAILY_VISITS_COLL + "_raw")
- return m.update(&mr2, bson.M{}, daily_raw, DAILY_VISITS_COLL)
-}
-
-func (m *MR) UpdateMonthVisits(statsColl *mgo.Collection) error {
- const numDays = 365
-
- start := time.Now().UTC().Add(-numDays * 24 * time.Hour).Truncate(24 * time.Hour)
-
- const reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- var mr mgo.MapReduce
- mr.Map = `function() {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth());
- emit({date: date, session: this.session}, 1);
- }`
- mr.Reduce = reduce
- err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, MONTHLY_VISITS_COLL+"_raw")
- if err != nil {
- return err
- }
- var mr2 mgo.MapReduce
- mr2.Map = `function() {
- emit(this['_id']['date'], 1);
- }`
- mr2.Reduce = reduce
- monthly_raw := m.database.C(MONTHLY_VISITS_COLL + "_raw")
- return m.update(&mr2, bson.M{}, monthly_raw, MONTHLY_VISITS_COLL)
-}
-
-func (m *MR) UpdateHourDownloads(statsColl *mgo.Collection) error {
- const numDays = 2
- start := time.Now().UTC().Add(-numDays * 24 * time.Hour)
-
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.section == "download") {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate(),
- this.date.getUTCHours());
- emit(date, 1);
- }
- }`
- mr.Reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- return m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, HOURLY_DOWNLOADS_COLL)
-}
-
-func (m *MR) UpdateDayDownloads(statsColl *mgo.Collection) error {
- const numDays = 30
- start := time.Now().UTC().Add(-numDays * 24 * time.Hour).Truncate(24 * time.Hour)
-
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.section == "download") {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth(),
- this.date.getUTCDate());
- emit(date, 1);
- }
- }`
- mr.Reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- return m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, DAILY_DOWNLOADS_COLL)
-}
-
-func (m *MR) UpdateMonthDownloads(statsColl *mgo.Collection) error {
- const numDays = 365
-
- start := time.Now().UTC().Add(-numDays * 24 * time.Hour).Truncate(24 * time.Hour)
-
- var mr mgo.MapReduce
- mr.Map = `function() {
- if (this.section == "download") {
- var date = Date.UTC(this.date.getUTCFullYear(),
- this.date.getUTCMonth());
- emit(date, 1);
- }
- }`
- mr.Reduce = `function(date, vals) {
- var count = 0;
- vals.forEach(function(v) { count += v; });
- return count;
- }`
- return m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, MONTHLY_DOWNLOADS_COLL)
-}
-
-func (m *MR) update(mr *mgo.MapReduce, query bson.M, queryColl *mgo.Collection, storeColl string) error {
- metaColl := m.database.C(META_COLL)
- _, err := metaColl.RemoveAll(bson.M{"type": storeColl})
- if err != nil {
- return err
- }
-
- mr.Out = bson.M{"replace": storeColl}
- _, err = queryColl.Find(query).MapReduce(mr, nil)
- if err != nil {
- return err
- }
-
- return metaColl.Insert(bson.M{"type": storeColl})
-}
-
-func (m *MR) isOutdated(coll string, minutes float64) bool {
- var result struct {
- Id bson.ObjectId `bson:"_id"`
- }
- metaColl := m.database.C(META_COLL)
- err := metaColl.Find(bson.M{"type": coll}).One(&result)
- if err != nil {
- return true
- }
-
- lastUpdate := result.Id.Time()
- return time.Since(lastUpdate).Minutes() > minutes
-}
diff --git a/tools/importer/search.go b/tools/importer/search.go
deleted file mode 100644
index 9f94543..0000000
--- a/tools/importer/search.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package main
-
-import (
- "labix.org/v2/mgo/bson"
- "net/http"
- "strconv"
- "strings"
-)
-
-func buildQuery(q string) bson.M {
- var reg []bson.RegEx
- query := bson.M{"active": true}
- words := strings.Split(q, " ")
- for _, w := range words {
- tag := strings.SplitN(w, ":", 2)
- if len(tag) > 1 {
- query[tag[0]] = bson.RegEx{tag[1], "i"}
- } else {
- reg = append(reg, bson.RegEx{w, "i"})
- }
- }
- if len(reg) > 0 {
- query["keywords"] = bson.M{"$all": reg}
- }
- return query
-}
-
-type searchData struct {
- S Status
- Found int
- Books []Book
- ItemsPage int
- Page int
- Next string
- Prev string
-}
-
-func searchHandler(h handler) {
- err := h.r.ParseForm()
- if err != nil {
- http.Error(h.w, err.Error(), http.StatusInternalServerError)
- return
- }
- req := strings.Join(h.r.Form["q"], " ")
- page := 0
- if len(h.r.Form["p"]) != 0 {
- page, err = strconv.Atoi(h.r.Form["p"][0])
- if err != nil {
- page = 0
- }
- }
- items_page := itemsPage(h.r)
- res, num, _ := h.db.GetBooks(buildQuery(req), items_page, page*items_page)
-
- var data searchData
- data.S = GetStatus(h)
- data.S.Search = req
- data.Books = res
- data.ItemsPage = items_page
- data.Found = num
- data.Page = page + 1
- if num > (page+1)*items_page {
- data.Next = "/search/?q=" + req + "&p=" + strconv.Itoa(page+1) + "&num=" + strconv.Itoa(items_page)
- }
- if page > 0 {
- data.Prev = "/search/?q=" + req + "&p=" + strconv.Itoa(page-1) + "&num=" + strconv.Itoa(items_page)
- }
-
- format := h.r.Form["fmt"]
- if (len(format) > 0) && (format[0] == "rss") {
- loadTxtTemplate(h.w, "search_rss.xml", data)
- } else {
- loadTemplate(h.w, "search", data)
- }
-}
-
-func itemsPage(r *http.Request) int {
- if len(r.Form["num"]) > 0 {
- items_page, err := strconv.Atoi(r.Form["num"][0])
- if err == nil {
- return items_page
- }
- }
- return SEARCH_ITEMS_PAGE
-}
diff --git a/tools/importer/session.go b/tools/importer/session.go
deleted file mode 100644
index e958cdc..0000000
--- a/tools/importer/session.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package main
-
-import (
- "encoding/hex"
- "github.com/gorilla/securecookie"
- "github.com/gorilla/sessions"
- "net/http"
-)
-
-var sesStore = sessions.NewCookieStore(securecookie.GenerateRandomKey(64))
-
-type Notification struct {
- Title string
- Msg string
- Type string /* error, info or success */
-}
-
-type Session struct {
- User string
- Role string
- S *sessions.Session
-}
-
-func GetSession(r *http.Request, db *DB) (s *Session) {
- s = new(Session)
- var err error
- s.S, err = sesStore.Get(r, "session")
- if err == nil && !s.S.IsNew {
- s.User, _ = s.S.Values["user"].(string)
- s.Role = db.UserRole(s.User)
- }
-
- if s.S.IsNew {
- s.S.Values["id"] = hex.EncodeToString(securecookie.GenerateRandomKey(16))
- }
-
- return
-}
-
-func (s *Session) GetNotif() []Notification {
- session := s.S
- msgs := session.Flashes("nMsg")
- titles := session.Flashes("nTitle")
- tpes := session.Flashes("nType")
- notif := make([]Notification, len(msgs))
- for i, m := range msgs {
- msg, _ := m.(string)
- title, _ := titles[i].(string)
- tpe, _ := tpes[i].(string)
- notif[i] = Notification{title, msg, tpe}
- }
- return notif
-}
-
-func (s *Session) LogIn(user string) {
- s.User = user
- s.S.Values["user"] = user
-}
-
-func (s *Session) LogOut() {
- s.S.Values["user"] = ""
-}
-
-func (s *Session) Notify(title, msg, tpe string) {
- s.S.AddFlash(msg, "nMsg")
- s.S.AddFlash(title, "nTitle")
- s.S.AddFlash(tpe, "nType")
-}
-
-func (s *Session) Save(w http.ResponseWriter, r *http.Request) {
- sesStore.Save(r, w, s.S)
-}
-
-func (s *Session) Id() string {
- id, _ := s.S.Values["id"].(string)
- return id
-}
-
-func (s *Session) IsAdmin() bool {
- return s.Role == "admin"
-}
diff --git a/tools/importer/stats.go b/tools/importer/stats.go
deleted file mode 100644
index 4218ddf..0000000
--- a/tools/importer/stats.go
+++ /dev/null
@@ -1,244 +0,0 @@
-package main
-
-import log "github.com/cihub/seelog"
-
-import (
- "github.com/gorilla/mux"
- "labix.org/v2/mgo/bson"
- "net/http"
- "strconv"
- "strings"
- "time"
-)
-
-type handler struct {
- w http.ResponseWriter
- r *http.Request
- sess *Session
- db *DB
-}
-
-func InitStats(database *DB) {
- statsChannel = make(chan statsRequest, CHAN_SIZE)
- go statsWorker(database)
-}
-
-func GatherStats(function func(handler), database *DB) func(http.ResponseWriter, *http.Request) {
- return func(w http.ResponseWriter, r *http.Request) {
- log.Info("Query ", r.Method, " ", r.RequestURI)
-
- var h handler
- h.db = database.Copy()
- defer h.db.Close()
-
- h.w = w
- h.r = r
- h.sess = GetSession(r, h.db)
- function(h)
-
- statsChannel <- statsRequest{bson.Now(), mux.Vars(r), h.sess, r}
- }
-}
-
-var statsChannel chan statsRequest
-
-type statsRequest struct {
- date time.Time
- vars map[string]string
- sess *Session
- r *http.Request
-}
-
-func statsWorker(database *DB) {
- db := database.Copy()
- defer db.Close()
-
- for req := range statsChannel {
- stats := make(map[string]interface{})
- appendFiles(req.r, stats)
- appendMuxVars(req.vars, stats)
- appendUrl(req.r, stats)
- appendSession(req.sess, stats)
- stats["method"] = req.r.Method
- stats["date"] = req.date
- db.InsertStats(stats)
- }
-}
-
-func statsHandler(h handler) {
- var data statsData
- data.S = GetStatus(h)
- data.S.Stats = true
- data.HVisits = getHourlyVisits(h.db)
- data.DVisits = getDailyVisits(h.db)
- data.MVisits = getMonthlyVisits(h.db)
- data.HDownloads = getHourlyDownloads(h.db)
- data.DDownloads = getDailyDownloads(h.db)
- data.MDownloads = getMonthlyDownloads(h.db)
-
- loadTemplate(h.w, "stats", data)
-}
-
-type statsData struct {
- S Status
- HVisits []visitData
- DVisits []visitData
- MVisits []visitData
- HDownloads []visitData
- DDownloads []visitData
- MDownloads []visitData
-}
-
-type visitData struct {
- Label string
- Count int
-}
-
-func getHourlyVisits(db *DB) []visitData {
- var visits []visitData
-
- visit, _ := db.GetHourVisits()
- for _, v := range visit {
- var elem visitData
- hour := time.Unix(v.Date/1000, 0).UTC().Hour()
- elem.Label = strconv.Itoa(hour + 1)
- elem.Count = v.Count
- visits = append(visits, elem)
- }
-
- return visits
-}
-
-func getDailyVisits(db *DB) []visitData {
- var visits []visitData
-
- visit, _ := db.GetDayVisits()
- for _, v := range visit {
- var elem visitData
- day := time.Unix(v.Date/1000, 0).UTC().Day()
- elem.Label = strconv.Itoa(day)
- elem.Count = v.Count
- visits = append(visits, elem)
- }
-
- return visits
-}
-
-func getMonthlyVisits(db *DB) []visitData {
- var visits []visitData
-
- visit, _ := db.GetMonthVisits()
- for _, v := range visit {
- var elem visitData
- month := time.Unix(v.Date/1000, 0).UTC().Month()
- elem.Label = month.String()
- elem.Count = v.Count
- visits = append(visits, elem)
- }
-
- return visits
-}
-
-func getHourlyDownloads(db *DB) []visitData {
- var visits []visitData
-
- visit, _ := db.GetHourDownloads()
- for _, v := range visit {
- var elem visitData
- hour := time.Unix(v.Date/1000, 0).UTC().Hour()
- elem.Label = strconv.Itoa(hour + 1)
- elem.Count = v.Count
- visits = append(visits, elem)
- }
-
- return visits
-}
-
-func getDailyDownloads(db *DB) []visitData {
- var visits []visitData
-
- visit, _ := db.GetDayDownloads()
- for _, v := range visit {
- var elem visitData
- day := time.Unix(v.Date/1000, 0).UTC().Day()
- elem.Label = strconv.Itoa(day)
- elem.Count = v.Count
- visits = append(visits, elem)
- }
-
- return visits
-}
-
-func getMonthlyDownloads(db *DB) []visitData {
- var visits []visitData
-
- visit, _ := db.GetMonthDownloads()
- for _, v := range visit {
- var elem visitData
- month := time.Unix(v.Date/1000, 0).UTC().Month()
- elem.Label = month.String()
- elem.Count = v.Count
- visits = append(visits, elem)
- }
-
- return visits
-}
-
-func appendFiles(r *http.Request, stats map[string]interface{}) {
- if r.Method == "POST" && r.MultipartForm != nil {
- files := r.MultipartForm.File
- for key := range files {
- list := make([]string, len(files[key]))
- for i, f := range files[key] {
- list[i] = f.Filename
- }
- stats[key] = list
- }
- }
-}
-
-func appendMuxVars(vars map[string]string, stats map[string]interface{}) {
- for key, value := range vars {
- switch {
- case key == "id":
- if bson.IsObjectIdHex(value) {
- stats["id"] = bson.ObjectIdHex(value)
- }
- case key == "ids":
- var objectIds []bson.ObjectId
- ids := strings.Split(value, "/")
- for _, id := range ids {
- if bson.IsObjectIdHex(value) {
- objectIds = append(objectIds, bson.ObjectIdHex(id))
- }
- }
- if len(objectIds) > 0 {
- stats["ids"] = objectIds
- stats["id"] = objectIds[0]
- }
- default:
- stats[key] = value
- }
- }
-}
-
-func appendUrl(r *http.Request, stats map[string]interface{}) {
- for key, value := range r.URL.Query() {
- stats[key] = value
- }
- stats["host"] = r.Host
- stats["path"] = r.URL.Path
- pattern := strings.Split(r.URL.Path, "/")
- if len(pattern) > 1 && pattern[1] != "" {
- stats["section"] = pattern[1]
- } else {
- stats["section"] = "/"
- }
-}
-
-func appendSession(sess *Session, stats map[string]interface{}) {
- stats["session"] = sess.Id()
- if sess.User != "" {
- stats["user"] = sess.User
- }
-}
diff --git a/tools/importer/store.go b/tools/importer/store.go
deleted file mode 100644
index 5b0ee8c..0000000
--- a/tools/importer/store.go
+++ /dev/null
@@ -1,128 +0,0 @@
-package main
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "io"
- "io/ioutil"
- "labix.org/v2/mgo/bson"
- "regexp"
- "strings"
-)
-
-func OpenBook(id bson.ObjectId, db *DB) (*epubgo.Epub, error) {
- fs := db.GetFS(FS_BOOKS)
- f, err := fs.OpenId(id)
- if err != nil {
- return nil, err
- }
- defer f.Close()
-
- buff, err := ioutil.ReadAll(f)
- reader := bytes.NewReader(buff)
-
- return epubgo.Load(reader, int64(len(buff)))
-}
-
-func StoreNewFile(name string, file io.Reader, db *DB) (bson.ObjectId, int64, error) {
- fs := db.GetFS(FS_BOOKS)
- fw, err := fs.Create(name)
- if err != nil {
- return "", 0, err
- }
- defer fw.Close()
-
- size, err := io.Copy(fw, file)
- id, _ := fw.Id().(bson.ObjectId)
- return id, size, err
-}
-
-func DeleteFile(id bson.ObjectId, db *DB) error {
- fs := db.GetFS(FS_BOOKS)
- return fs.RemoveId(id)
-}
-
-func DeleteCover(id bson.ObjectId, db *DB) error {
- fs := db.GetFS(FS_IMGS)
- return fs.RemoveId(id)
-}
-
-func DeleteBook(book Book, db *DB) {
- if book.Cover != "" {
- DeleteCover(book.Cover, db)
- }
- if book.CoverSmall != "" {
- DeleteCover(book.CoverSmall, db)
- }
- DeleteFile(book.File, db)
-}
-
-func cleanStr(str string) string {
-	str = strings.Replace(str, "&#39;", "'", -1)
- exp, _ := regexp.Compile("&[^;]*;")
- str = exp.ReplaceAllString(str, "")
- exp, _ = regexp.Compile("[ ,]*$")
- str = exp.ReplaceAllString(str, "")
- return str
-}
-
-func parseAuthr(creator []string) []string {
- exp1, _ := regexp.Compile("^(.*\\( *([^\\)]*) *\\))*$")
- exp2, _ := regexp.Compile("^[^:]*: *(.*)$")
- res := make([]string, len(creator))
- for i, s := range creator {
- auth := exp1.FindStringSubmatch(s)
- if auth != nil {
- res[i] = cleanStr(strings.Join(auth[2:], ", "))
- } else {
- auth := exp2.FindStringSubmatch(s)
- if auth != nil {
- res[i] = cleanStr(auth[1])
- } else {
- res[i] = cleanStr(s)
- }
- }
- }
- return res
-}
-
-func parseDescription(description []string) string {
- str := cleanStr(strings.Join(description, "\n"))
- str = strings.Replace(str, "", "\n", -1)
- exp, _ := regexp.Compile("<[^>]*>")
- str = exp.ReplaceAllString(str, "")
-	str = strings.Replace(str, "&amp;", "&", -1)
-	str = strings.Replace(str, "&lt;", "<", -1)
-	str = strings.Replace(str, "&gt;", ">", -1)
- str = strings.Replace(str, "\\n", "\n", -1)
- return str
-}
-
-func parseSubject(subject []string) []string {
- var res []string
- for _, s := range subject {
- res = append(res, strings.Split(s, " / ")...)
- }
- return res
-}
-
-func parseDate(date []string) string {
- if len(date) == 0 {
- return ""
- }
- return strings.Replace(date[0], "Unspecified: ", "", -1)
-}
-
-func keywords(b map[string]interface{}) (k []string) {
- title, _ := b["title"].(string)
- k = strings.Split(title, " ")
- author, _ := b["author"].([]string)
- for _, a := range author {
- k = append(k, strings.Split(a, " ")...)
- }
- publisher, _ := b["publisher"].(string)
- k = append(k, strings.Split(publisher, " ")...)
- subject, _ := b["subject"].([]string)
- k = append(k, subject...)
- return
-}
diff --git a/tools/importer/upload.go b/tools/importer/upload.go
deleted file mode 100644
index 8f05f0a..0000000
--- a/tools/importer/upload.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package main
-
-import log "github.com/cihub/seelog"
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "io/ioutil"
- "mime/multipart"
- "strings"
-)
-
-func InitUpload(database *DB) {
- uploadChannel = make(chan uploadRequest, CHAN_SIZE)
- go uploadWorker(database)
-}
-
-var uploadChannel chan uploadRequest
-
-type uploadRequest struct {
- file multipart.File
- filename string
-}
-
-func uploadWorker(database *DB) {
- db := database.Copy()
- defer db.Close()
-
- for req := range uploadChannel {
- processFile(req, db)
- }
-}
-
-func processFile(req uploadRequest, db *DB) {
- defer req.file.Close()
-
- epub, err := openMultipartEpub(req.file)
- if err != nil {
- log.Warn("Not valid epub uploaded file ", req.filename, ": ", err)
- return
- }
- defer epub.Close()
-
- book := parseFile(epub, db)
- title, _ := book["title"].(string)
- req.file.Seek(0, 0)
- id, size, err := StoreNewFile(title+".epub", req.file, db)
- if err != nil {
- log.Error("Error storing book (", title, "): ", err)
- return
- }
-
- book["file"] = id
- book["filesize"] = size
- err = db.InsertBook(book)
- if err != nil {
- log.Error("Error storing metadata (", title, "): ", err)
- return
- }
- log.Info("File uploaded: ", req.filename)
-}
-
-func uploadPostHandler(h handler) {
- problem := false
-
- h.r.ParseMultipartForm(20000000)
- filesForm := h.r.MultipartForm.File["epub"]
- for _, f := range filesForm {
- file, err := f.Open()
- if err != nil {
- log.Error("Can not open uploaded file ", f.Filename, ": ", err)
- h.sess.Notify("Upload problem!", "There was a problem with book "+f.Filename, "error")
- problem = true
- continue
- }
- uploadChannel <- uploadRequest{file, f.Filename}
- }
-
- if !problem {
- if len(filesForm) > 0 {
- h.sess.Notify("Upload successful!", "Thank you for your contribution", "success")
- } else {
- h.sess.Notify("Upload problem!", "No books where uploaded.", "error")
- }
- }
- uploadHandler(h)
-}
-
-func uploadHandler(h handler) {
- var data uploadData
- data.S = GetStatus(h)
- data.S.Upload = true
- loadTemplate(h.w, "upload", data)
-}
-
-type uploadData struct {
- S Status
-}
-
-func openMultipartEpub(file multipart.File) (*epubgo.Epub, error) {
- buff, _ := ioutil.ReadAll(file)
- reader := bytes.NewReader(buff)
- return epubgo.Load(reader, int64(len(buff)))
-}
-
-func parseFile(epub *epubgo.Epub, db *DB) map[string]interface{} {
- book := map[string]interface{}{}
- for _, m := range epub.MetadataFields() {
- data, err := epub.Metadata(m)
- if err != nil {
- continue
- }
- switch m {
- case "creator":
- book["author"] = parseAuthr(data)
- case "description":
- book[m] = parseDescription(data)
- case "subject":
- book[m] = parseSubject(data)
- case "date":
- book[m] = parseDate(data)
- case "language":
- book["lang"] = data
- case "title", "contributor", "publisher":
- book[m] = cleanStr(strings.Join(data, ", "))
- case "identifier":
- attr, _ := epub.MetadataAttr(m)
- for i, d := range data {
- if attr[i]["scheme"] == "ISBN" {
- book["isbn"] = d
- }
- }
- default:
- book[m] = strings.Join(data, ", ")
- }
- }
- title, _ := book["title"].(string)
- book["file"] = nil
- cover, coverSmall := GetCover(epub, title, db)
- if cover != "" {
- book["cover"] = cover
- book["coversmall"] = coverSmall
- }
- book["keywords"] = keywords(book)
- return book
-}
diff --git a/tools/keywords/config.go b/tools/keywords/config.go
deleted file mode 100644
index 902dcc4..0000000
--- a/tools/keywords/config.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package main
-
-const (
- PORT = "8080"
-
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- META_COLL = "meta"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- PASS_SALT = "ImperialLibSalt"
- MINUTES_UPDATE_TAGS = 11
- MINUTES_UPDATE_VISITED = 41
- MINUTES_UPDATE_DOWNLOADED = 47
- MINUTES_UPDATE_HOURLY_V = 31
- MINUTES_UPDATE_DAILY_V = 60*12 + 7
- MINUTES_UPDATE_MONTHLY_V = 60*24 + 11
- MINUTES_UPDATE_HOURLY_D = 29
- MINUTES_UPDATE_DAILY_D = 60*12 + 13
- MINUTES_UPDATE_MONTHLY_D = 60*24 + 17
- MINUTES_UPDATE_LOGGER = 5
- BOOKS_FRONT_PAGE = 6
- SEARCH_ITEMS_PAGE = 20
- NEW_ITEMS_PAGE = 50
- NUM_NEWS = 10
- DAYS_NEWS_INDEXPAGE = 15
-
- TEMPLATE_PATH = "templates/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
- LOGGER_CONFIG = "logger.xml"
-
- IMG_WIDTH_BIG = 300
- IMG_WIDTH_SMALL = 60
- IMG_QUALITY = 80
-
- CHAN_SIZE = 100
-)
diff --git a/tools/keywords/keywords.go b/tools/keywords/keywords.go
deleted file mode 100644
index 1bc5fd5..0000000
--- a/tools/keywords/keywords.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package main
-
-import (
- "fmt"
- "git.gitorious.org/trantor/trantor.git/database"
- "gopkgs.com/unidecode.v1"
- "labix.org/v2/mgo/bson"
- "strings"
- "unicode"
-)
-
-func main() {
- db := database.Init(DB_IP, DB_NAME)
- defer db.Close()
-
- books, _, err := db.GetBooks(bson.M{}, 0, 0)
- if err != nil {
- fmt.Println(err)
- return
- }
-
- for _, b := range books {
- fmt.Println(b.Title)
- book := map[string]interface{}{
- "title": b.Title,
- "author": b.Author,
- "publisher": b.Publisher,
- "subject": b.Subject,
- }
- k := keywords(book)
- book = map[string]interface{}{"keywords": k}
- id := bson.ObjectIdHex(b.Id)
- err := db.UpdateBook(id, book)
- if err != nil {
- fmt.Println(err)
- }
- }
-}
-
-func keywords(b map[string]interface{}) (k []string) {
- title, _ := b["title"].(string)
- k = tokens(title)
- author, _ := b["author"].([]string)
- for _, a := range author {
- k = append(k, tokens(a)...)
- }
- publisher, _ := b["publisher"].(string)
- k = append(k, tokens(publisher)...)
- subject, _ := b["subject"].([]string)
- for _, s := range subject {
- k = append(k, tokens(s)...)
- }
- return
-}
-
-func tokens(str string) []string {
- str = unidecode.Unidecode(str)
- str = strings.ToLower(str)
- f := func(r rune) bool {
- return unicode.IsControl(r) || unicode.IsPunct(r) || unicode.IsSpace(r)
- }
- return strings.FieldsFunc(str, f)
-}
diff --git a/tools/store/store.go b/tools/store/store.go
deleted file mode 100644
index 749ffb8..0000000
--- a/tools/store/store.go
+++ /dev/null
@@ -1,148 +0,0 @@
-package main
-
-import (
- log "github.com/cihub/seelog"
-
- "crypto/rand"
- "encoding/base64"
- "os"
-
- "git.gitorious.org/trantor/trantor.git/storage"
- "gopkg.in/mgo.v2"
- "gopkg.in/mgo.v2/bson"
-)
-
-const (
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- BOOKS_COLL = "books"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- STORE_PATH = "store/"
- EPUB_FILE = "book.epub"
- COVER_FILE = "cover.jpg"
- COVER_SMALL_FILE = "coverSmall.jpg"
-
- NUM_WORKERS = 10
-)
-
-type Book struct {
- Id bson.ObjectId `bson:"_id"`
- Title string
- Author []string
- Contributor string
- Publisher string
- Description string
- Subject []string
- Date string
- Lang []string
- Isbn string
- Type string
- Format string
- Source string
- Relation string
- Coverage string
- Rights string
- Meta string
- File bson.ObjectId
- FileSize int
- Cover bson.ObjectId
- CoverSmall bson.ObjectId
- Active bool
- Keywords []string
-}
-
-func main() {
- db := InitDB(DB_IP)
- defer db.Close()
- store, err := storage.Init(STORE_PATH)
- if err != nil {
- log.Critical(err)
- os.Exit(1)
- }
-
- channel := make(chan Book)
- quit := make(chan bool)
- for i := 0; i < NUM_WORKERS; i++ {
- go worker(channel, quit, db, store)
- }
-
- booksColl := db.DB(DB_NAME).C(BOOKS_COLL)
- books := booksColl.Find(bson.M{}).Batch(200).Prefetch(0.25).Iter()
- var book Book
- for books.Next(&book) {
- channel <- book
- }
- if err := books.Close(); err != nil {
- log.Critical(err)
- }
- close(channel)
-
- for i := 0; i < NUM_WORKERS; i++ {
- log.Info("Worker ", i, " has finished")
- <-quit
- }
-}
-
-func InitDB(host string) *mgo.Session {
- session, err := mgo.Dial(host)
- if err != nil {
- log.Critical(err)
- os.Exit(1)
- }
- return session
-}
-
-func worker(channel chan Book, quit chan bool, database *mgo.Session, store *storage.Store) {
- db := database.Copy()
- defer db.Close()
-
- fsBooks := db.DB(DB_NAME).GridFS(FS_BOOKS)
- fsImgs := db.DB(DB_NAME).GridFS(FS_IMGS)
- booksColl := db.DB(DB_NAME).C(BOOKS_COLL)
-
- for book := range channel {
- id := genId()
- log.Info("== Storing book '", book.Title, "' (", id, ") ==")
- cover := true
-
- process(id, EPUB_FILE, book.File, fsBooks, store)
- err := process(id, COVER_FILE, book.Cover, fsImgs, store)
- if err != nil {
- cover = false
- }
- process(id, COVER_SMALL_FILE, book.CoverSmall, fsImgs, store)
-
- query := bson.M{"$set": bson.M{"id": id, "cover": cover},
- "$unset": bson.M{"file": "", "coversmall": ""}}
- err = booksColl.UpdateId(book.Id, query)
- if err != nil {
- log.Error("Can no update ", book.Id.Hex())
- }
- }
- quit <- true
-}
-
-func process(id string, name string, objId bson.ObjectId, fs *mgo.GridFS, store *storage.Store) error {
- f, err := fs.OpenId(objId)
- if err != nil {
- if name == EPUB_FILE {
- log.Error(id, " - can not open ", objId.Hex())
- }
- return err
- }
- defer f.Close()
-
- _, err = store.Store(id, f, name)
- if err != nil {
- log.Error("Can not store '", id, "' (", objId, ")")
- }
- return err
-}
-
-func genId() string {
- b := make([]byte, 12)
- rand.Read(b)
- return base64.URLEncoding.EncodeToString(b)
-}
diff --git a/tools/togridfs/config.go b/tools/togridfs/config.go
deleted file mode 100644
index 8ed9110..0000000
--- a/tools/togridfs/config.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package main
-
-const (
- PORT = "8080"
-
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- META_COLL = "meta"
- BOOKS_COLL = "books"
- TAGS_COLL = "tags"
- USERS_COLL = "users"
- STATS_COLL = "statistics"
- FS_BOOKS = "fs_books"
- FS_IMGS = "fs_imgs"
-
- PASS_SALT = "ImperialLibSalt"
- MINUTES_UPDATE_TAGS = 10
- TAGS_DISPLAY = 50
- SEARCH_ITEMS_PAGE = 20
- NEW_ITEMS_PAGE = 50
-
- TEMPLATE_PATH = "templates/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
-
- IMG_WIDTH_BIG = 300
- IMG_WIDTH_SMALL = 60
- IMG_QUALITY = 80
-
- STATS_CHAN_SIZE = 100
-)
diff --git a/tools/togridfs/cover.go b/tools/togridfs/cover.go
deleted file mode 100644
index f135790..0000000
--- a/tools/togridfs/cover.go
+++ /dev/null
@@ -1,141 +0,0 @@
-package main
-
-import _ "image/png"
-import _ "image/jpeg"
-import _ "image/gif"
-
-import (
- "bytes"
- "git.gitorious.org/go-pkg/epubgo.git"
- "github.com/nfnt/resize"
- "image"
- "image/jpeg"
- "io"
- "io/ioutil"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "log"
- "regexp"
- "strings"
-)
-
-func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
- imgId, smallId := searchCommonCoverNames(e, title)
- if imgId != "" {
- return imgId, smallId
- }
-
- /* search for img on the text */
- exp, _ := regexp.Compile(" 0 {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data, "$unset": unset[0]})
- }
- return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
-}
-
-func (d *DB) IncVisit(id bson.ObjectId) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$inc": bson.M{"VisitsCount": 1}})
-}
-
-func (d *DB) IncDownload(id bson.ObjectId) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$inc": bson.M{"DownloadCount": 1}})
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
- var start, length int
- if len(r) > 0 {
- length = r[0]
- if len(r) > 1 {
- start = r[1]
- }
- }
- q := d.books.Find(query).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
- if start != 0 {
- q = q.Skip(start)
- }
- if length != 0 {
- q = q.Limit(length)
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most visited books
- */
-func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"active": true}).Sort("-VisitsCount").Limit(num)
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most downloaded books
- */
-func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"active": true}).Sort("-DownloadCount").Limit(num)
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
- return d.GetBooks(bson.M{"$nor": []bson.M{{"active": true}}}, r...)
-}
-
-func (d *DB) BookActive(id bson.ObjectId) bool {
- var book Book
- err := d.books.Find(bson.M{"_id": id}).One(&book)
- if err != nil {
- return false
- }
- return book.Active
-}
-
-func (d *DB) GetFS(prefix string) *mgo.GridFS {
- return d.session.DB(DB_NAME).GridFS(prefix)
-}
-
-func (d *DB) areTagsOutdated() bool {
- var result struct {
- Id bson.ObjectId `bson:"_id"`
- }
- err := d.meta.Find(bson.M{"type": META_TYPE_TAGS}).One(&result)
- if err != nil {
- return true
- }
-
- lastUpdate := result.Id.Time()
- return time.Since(lastUpdate).Minutes() > MINUTES_UPDATE_TAGS
-}
-
-func (d *DB) updateTags() error {
- _, err := d.meta.RemoveAll(bson.M{"type": META_TYPE_TAGS})
- if err != nil {
- return err
- }
-
- var mr mgo.MapReduce
- mr.Map = "function() { " +
- "if (this.active) { this.subject.forEach(function(s) { emit(s, 1); }); }" +
- "}"
- mr.Reduce = "function(tag, vals) { " +
- "var count = 0;" +
- "vals.forEach(function() { count += 1; });" +
- "return count;" +
- "}"
- mr.Out = bson.M{"replace": TAGS_COLL}
- _, err = d.books.Find(bson.M{"active": true}).MapReduce(&mr, nil)
- if err != nil {
- return err
- }
-
- return d.meta.Insert(bson.M{"type": META_TYPE_TAGS})
-}
-
-func (d *DB) GetTags(numTags int) ([]string, error) {
- if d.areTagsOutdated() {
- err := d.updateTags()
- if err != nil {
- return nil, err
- }
- }
-
- var result []struct {
- Tag string "_id"
- }
- err := d.tags.Find(nil).Sort("-value").Limit(numTags).All(&result)
- if err != nil {
- return nil, err
- }
- tags := make([]string, len(result))
- for i, r := range result {
- tags[i] = r.Tag
- }
- return tags, nil
-}
diff --git a/tools/togridfs/togridfs.go b/tools/togridfs/togridfs.go
deleted file mode 100644
index fd7b666..0000000
--- a/tools/togridfs/togridfs.go
+++ /dev/null
@@ -1,87 +0,0 @@
-package main
-
-import (
- "fmt"
- "git.gitorious.org/go-pkg/epubgo.git"
- "io"
- "labix.org/v2/mgo/bson"
- "os"
-)
-
-func main() {
- db = initDB()
- defer db.Close()
- books, _, _ := db.GetBooks(bson.M{})
-
- for _, book := range books {
- if book.Path == "" {
- fmt.Println("don't needed -- ", book.Title)
- continue
- }
- fmt.Println(book.Title)
-
- path := getPath(book)
-
- id, err := storeFile(path, book)
- if err != nil {
- fmt.Println("storeFile ================", err)
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"active": false})
- continue
- }
-
- cover, coverSmall, err := cover(path, book)
- if err != nil {
- fmt.Println("cover ================", err)
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"active": false, "file": id})
- continue
- }
-
- if cover != "" {
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"cover": cover, "coversmall": coverSmall, "file": id}, bson.M{"path": 1})
- } else {
- fmt.Println("No cover ================", book.Title)
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"active": false, "file": id})
- }
- }
-}
-
-func getPath(book Book) string {
- if !book.Active {
- return "new/" + book.Path
- }
- return "books/" + book.Path
-}
-
-func storeFile(path string, book Book) (bson.ObjectId, error) {
- fs := db.GetFS(FS_BOOKS)
-
- file, err := os.Open(path)
- if err != nil {
- return "", err
- }
- defer file.Close()
-
- fw, err := fs.Create(book.Title + ".epub")
- if err != nil {
- return "", err
- }
- defer fw.Close()
- id, _ := fw.Id().(bson.ObjectId)
-
- _, err = io.Copy(fw, file)
- if err != nil {
- return id, err
- }
- return id, nil
-}
-
-func cover(path string, book Book) (bson.ObjectId, bson.ObjectId, error) {
- e, err := epubgo.Open(path)
- if err != nil {
- return "", "", err
- }
- defer e.Close()
-
- cover, coverSmall := GetCover(e, book.Title)
- return cover, coverSmall, err
-}
diff --git a/tools/update/config.go b/tools/update/config.go
deleted file mode 100644
index fdbb611..0000000
--- a/tools/update/config.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package main
-
-const (
- PORT = "8080"
- DB_IP = "127.0.0.1"
- DB_NAME = "trantor"
- BOOKS_COLL = "books"
- NEW_BOOKS_COLL = "new"
- USERS_COLL = "users"
- PASS_SALT = "ImperialLibSalt"
- TAGS_DISPLAY = 50
- SEARCH_ITEMS_PAGE = 10
- TEMPLATE_PATH = "templates/"
- BOOKS_PATH = "books/"
- COVER_PATH = "cover/"
- NEW_PATH = "new/"
- CSS_PATH = "css/"
- JS_PATH = "js/"
- IMG_PATH = "img/"
- RESIZE_CMD = "/usr/bin/convert -resize 300 -quality 60 "
- RESIZE_THUMB_CMD = "/usr/bin/convert -resize 60 -quality 60 "
-)
diff --git a/tools/update/database.go b/tools/update/database.go
deleted file mode 100644
index 2f48536..0000000
--- a/tools/update/database.go
+++ /dev/null
@@ -1,214 +0,0 @@
-package main
-
-import (
- "crypto/md5"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- "sort"
-)
-
-var db *DB
-
-type Book struct {
- Id string `bson:"_id"`
- Title string
- Author []string
- Contributor string
- Publisher string
- Description string
- Subject []string
- Date string
- Lang []string
- Type string
- Format string
- Source string
- Relation string
- Coverage string
- Rights string
- Meta string
- Path string
- Cover string
- CoverSmall string
- Active bool
- Keywords []string
-}
-
-type DB struct {
- session *mgo.Session
- books *mgo.Collection
- user *mgo.Collection
-}
-
-func initDB() *DB {
- var err error
- d := new(DB)
- d.session, err = mgo.Dial(DB_IP)
- if err != nil {
- panic(err)
- }
-
- d.books = d.session.DB(DB_NAME).C(BOOKS_COLL)
- d.user = d.session.DB(DB_NAME).C(USERS_COLL)
- return d
-}
-
-func (d *DB) Close() {
- d.session.Close()
-}
-
-func md5Pass(pass string) []byte {
- h := md5.New()
- hash := h.Sum(([]byte)(PASS_SALT + pass))
- return hash
-}
-
-func (d *DB) SetPassword(user string, pass string) error {
- hash := md5Pass(pass)
- return d.user.Update(bson.M{"user": user}, bson.M{"$set": bson.M{"pass": hash}})
-}
-
-func (d *DB) UserValid(user string, pass string) bool {
- hash := md5Pass(pass)
- n, err := d.user.Find(bson.M{"user": user, "pass": hash}).Count()
- if err != nil {
- return false
- }
- return n != 0
-}
-
-func (d *DB) InsertBook(book interface{}) error {
- return d.books.Insert(book)
-}
-
-func (d *DB) RemoveBook(id bson.ObjectId) error {
- return d.books.Remove(bson.M{"_id": id})
-}
-
-func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
-}
-
-func (d *DB) IncVisit(id bson.ObjectId) error {
- return d.books.Update(bson.M{"_id": id}, bson.M{"$inc": bson.M{"VisitsCount": 1}})
-}
-
-func (d *DB) IncDownload(path string) error {
- return d.books.Update(bson.M{"path": path}, bson.M{"$inc": bson.M{"DownloadCount": 1}})
-}
-
-/* optional parameters: length and start index
- *
- * Returns: list of books, number found and err
- */
-func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
- var start, length int
- if len(r) > 0 {
- length = r[0]
- if len(r) > 1 {
- start = r[1]
- }
- }
- q := d.books.Find(query).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
- if start != 0 {
- q = q.Skip(start)
- }
- if length != 0 {
- q = q.Limit(length)
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most visited books
- */
-func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"active": true}).Sort("-VisitsCount").Limit(num)
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Get the most downloaded books
- */
-func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"active": true}).Sort("-DownloadCount").Limit(num)
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-/* Returns: list of books, number found and err
- */
-func (d *DB) GetNewBooks() (books []Book, num int, err error) {
- var q *mgo.Query
- q = d.books.Find(bson.M{"$nor": []bson.M{{"active": true}}}).Sort("-_id")
- num, err = q.Count()
- if err != nil {
- return
- }
-
- err = q.All(&books)
- for i, b := range books {
- books[i].Id = bson.ObjectId(b.Id).Hex()
- }
- return
-}
-
-func (d *DB) BookActive(id bson.ObjectId) bool {
- var book Book
- err := d.books.Find(bson.M{"_id": id}).One(&book)
- if err != nil {
- return false
- }
- return book.Active
-}
-
-type tagsList []struct {
- Subject string "_id"
- Count int "value"
-}
-
-func (t tagsList) Len() int {
- return len(t)
-}
-func (t tagsList) Less(i, j int) bool {
- return t[i].Count > t[j].Count
-}
-func (t tagsList) Swap(i, j int) {
- aux := t[i]
- t[i] = t[j]
- t[j] = aux
-}
-
-func (d *DB) GetTags() (tagsList, error) {
- // TODO: cache the tags
- var mr mgo.MapReduce
- mr.Map = "function() { " +
- "if (this.active) { this.subject.forEach(function(s) { emit(s, 1); }); }" +
- "}"
- mr.Reduce = "function(tag, vals) { " +
- "var count = 0;" +
- "vals.forEach(function() { count += 1; });" +
- "return count;" +
- "}"
- var result tagsList
- _, err := d.books.Find(nil).MapReduce(&mr, &result)
- if err == nil {
- sort.Sort(result)
- }
- return result, err
-}
diff --git a/tools/update/store.go b/tools/update/store.go
deleted file mode 100644
index 57c0c1e..0000000
--- a/tools/update/store.go
+++ /dev/null
@@ -1,265 +0,0 @@
-package main
-
-import (
- "git.gitorious.org/go-pkg/epub.git"
- "io"
- "log"
- "os"
- "os/exec"
- "regexp"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-func ParseFile(path string) (string, error) {
- book := map[string]interface{}{}
-
- e, err := epub.Open(NEW_PATH+path, 0)
- if err != nil {
- return "", err
- }
- defer e.Close()
-
- title := cleanStr(strings.Join(e.Metadata(epub.EPUB_TITLE), ", "))
- book["title"] = title
- book["author"] = parseAuthr(e.Metadata(epub.EPUB_CREATOR))
- book["contributor"] = cleanStr(strings.Join(e.Metadata(epub.EPUB_CONTRIB), ", "))
- book["publisher"] = cleanStr(strings.Join(e.Metadata(epub.EPUB_PUBLISHER), ", "))
- book["description"] = parseDescription(e.Metadata(epub.EPUB_DESCRIPTION))
- book["subject"] = parseSubject(e.Metadata(epub.EPUB_SUBJECT))
- book["date"] = parseDate(e.Metadata(epub.EPUB_DATE))
- book["lang"] = e.Metadata(epub.EPUB_LANG)
- book["type"] = strings.Join(e.Metadata(epub.EPUB_TYPE), ", ")
- book["format"] = strings.Join(e.Metadata(epub.EPUB_FORMAT), ", ")
- book["source"] = strings.Join(e.Metadata(epub.EPUB_SOURCE), ", ")
- book["relation"] = strings.Join(e.Metadata(epub.EPUB_RELATION), ", ")
- book["coverage"] = strings.Join(e.Metadata(epub.EPUB_COVERAGE), ", ")
- book["rights"] = strings.Join(e.Metadata(epub.EPUB_RIGHTS), ", ")
- book["meta"] = strings.Join(e.Metadata(epub.EPUB_META), ", ")
- book["path"] = path
- cover, coverSmall := getCover(e, title)
- book["cover"] = cover
- book["coversmall"] = coverSmall
- book["keywords"] = keywords(book)
-
- db.InsertBook(book)
- return title, nil
-}
-
-func StoreNewFile(name string, file io.Reader) (string, error) {
- path := storePath(name)
- fw, err := os.Create(NEW_PATH + path)
- if err != nil {
- return "", err
- }
- defer fw.Close()
-
- const size = 1024
- var n int = size
- buff := make([]byte, size)
- for n == size {
- n, err = file.Read(buff)
-		fw.Write(buff[:n])
- }
- return path, nil
-}
-
-func StoreBook(book Book) (path string, err error) {
- title := book.Title
- path = validFileName(BOOKS_PATH, title, ".epub")
-
- oldPath := NEW_PATH + book.Path
- r, _ := utf8.DecodeRuneInString(title)
- folder := string(r)
- if _, err = os.Stat(BOOKS_PATH + folder); err != nil {
- err = os.Mkdir(BOOKS_PATH+folder, os.ModePerm)
- if err != nil {
- log.Println("Error creating", BOOKS_PATH+folder, ":", err.Error())
- return
- }
- }
- cmd := exec.Command("mv", oldPath, BOOKS_PATH+path)
- err = cmd.Run()
- return
-}
-
-func DeleteBook(book Book) {
- if book.Cover != "" {
- os.RemoveAll(book.Cover[1:])
- }
- if book.CoverSmall != "" {
- os.RemoveAll(book.CoverSmall[1:])
- }
- os.RemoveAll(book.Path)
-}
-
-func validFileName(path string, title string, extension string) string {
- title = strings.Replace(title, "/", "_", -1)
- title = strings.Replace(title, "?", "_", -1)
- title = strings.Replace(title, "#", "_", -1)
- r, _ := utf8.DecodeRuneInString(title)
- folder := string(r)
- file := folder + "/" + title + extension
- _, err := os.Stat(path + file)
- for i := 0; err == nil; i++ {
- file = folder + "/" + title + "_" + strconv.Itoa(i) + extension
- _, err = os.Stat(path + file)
- }
- return file
-}
-
-func storePath(name string) string {
- path := name
- _, err := os.Stat(NEW_PATH + path)
- for i := 0; err == nil; i++ {
- path = strconv.Itoa(i) + "_" + name
- _, err = os.Stat(NEW_PATH + path)
- }
- return path
-}
-
-func cleanStr(str string) string {
-	str = strings.Replace(str, "&#39;", "'", -1)
- exp, _ := regexp.Compile("&[^;]*;")
- str = exp.ReplaceAllString(str, "")
- exp, _ = regexp.Compile("[ ,]*$")
- str = exp.ReplaceAllString(str, "")
- return str
-}
-
-func storeImg(img []byte, title, extension string) (string, string) {
- r, _ := utf8.DecodeRuneInString(title)
- folder := string(r)
- if _, err := os.Stat(COVER_PATH + folder); err != nil {
- err = os.Mkdir(COVER_PATH+folder, os.ModePerm)
- if err != nil {
- log.Println("Error creating", COVER_PATH+folder, ":", err.Error())
- return "", ""
- }
- }
- imgPath := validFileName(COVER_PATH, title, extension)
-
- /* store img on disk */
- file, err := os.Create(COVER_PATH + imgPath)
- if err != nil {
- log.Println("Error creating", COVER_PATH+imgPath, ":", err.Error())
- return "", ""
- }
- defer file.Close()
- file.Write(img)
-
- /* resize img */
- resize := append(strings.Split(RESIZE_CMD, " "), COVER_PATH+imgPath, COVER_PATH+imgPath)
- cmd := exec.Command(resize[0], resize[1:]...)
- cmd.Run()
- imgPathSmall := validFileName(COVER_PATH, title, "_small"+extension)
- resize = append(strings.Split(RESIZE_THUMB_CMD, " "), COVER_PATH+imgPath, COVER_PATH+imgPathSmall)
- cmd = exec.Command(resize[0], resize[1:]...)
- cmd.Run()
- return imgPath, imgPathSmall
-}
-
-func getCover(e *epub.Epub, title string) (string, string) {
- /* Try first common names */
- for _, p := range []string{"cover.jpg", "Images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"} {
- img := e.Data(p)
- if len(img) != 0 {
- return storeImg(img, title, ".jpg")
- }
- }
-
-	/* a search for an img in the text could go here; fall back to no cover */
-	return "", ""
-}
-
-func parseDescription(description []string) string {
-	str := cleanStr(strings.Join(description, "\n"))
-	exp, _ := regexp.Compile("<[^>]*>")
-	str = exp.ReplaceAllString(str, "")
-	str = strings.Replace(str, "&amp;", "&", -1)
-	str = strings.Replace(str, "&lt;", "<", -1)
-	str = strings.Replace(str, "&gt;", ">", -1)
-	str = strings.Replace(str, "\\n", "\n", -1)
-	return str
-}
-
-func parseSubject(subject []string) []string {
- var res []string
- for _, s := range subject {
- res = append(res, strings.Split(s, " / ")...)
- }
- return res
-}
-
-func parseDate(date []string) string {
- if len(date) == 0 {
- return ""
- }
- return strings.Replace(date[0], "Unspecified: ", "", -1)
-}
-
-func keywords(b map[string]interface{}) (k []string) {
- title, _ := b["title"].(string)
- k = strings.Split(title, " ")
- author, _ := b["author"].([]string)
- for _, a := range author {
- k = append(k, strings.Split(a, " ")...)
- }
- publisher, _ := b["publisher"].(string)
- k = append(k, strings.Split(publisher, " ")...)
- subject, _ := b["subject"].([]string)
- k = append(k, subject...)
- return
-}
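
For reference, a minimal sketch of the file-storage step built on io.Copy, which handles short reads and propagates read and write errors instead of looping over a fixed-size buffer. `storeNewFile` and the `newPath` argument are illustrative names; the removed code used a package-level NEW_PATH constant, and the collision handling done by storePath is left out here.

	package trantortools // illustrative package name, not part of the removed code

	import (
		"io"
		"os"
		"path/filepath"
	)

	// storeNewFile copies an uploaded epub into the "new books" directory and
	// returns the name it was stored under.
	func storeNewFile(newPath, name string, file io.Reader) (string, error) {
		fw, err := os.Create(filepath.Join(newPath, name))
		if err != nil {
			return "", err
		}
		defer fw.Close()

		// io.Copy streams until EOF and reports any read or write error.
		if _, err := io.Copy(fw, file); err != nil {
			return "", err
		}
		return name, nil
	}
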
diff --git a/tools/update/update.go b/tools/update/update.go
deleted file mode 100644
index ecde711..0000000
--- a/tools/update/update.go
+++ /dev/null
@@ -1,27 +0,0 @@
-package main
-
-import (
- "fmt"
- "git.gitorious.org/go-pkg/epub.git"
- "labix.org/v2/mgo/bson"
-)
-
-func main() {
- db = initDB()
- defer db.Close()
- books, _, _ := db.GetBooks(bson.M{})
-
- for _, book := range books {
- fmt.Println(book.Title)
- e, err := epub.Open(BOOKS_PATH+book.Path, 0)
- if err != nil {
- fmt.Println("================", err)
-			fmt.Println("================", err)
-			continue
-
- cover, coverSmall := getCover(e, book.Title)
- if cover != "" {
- db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"cover": cover, "coversmall": coverSmall})
- }
- e.Close()
- }
-}
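
For reference, a sketch of the per-book work from the update tool pulled into a helper, so the epub handle is closed via defer even when the cover lookup returns early. It assumes the removed tool's package-level db, BOOKS_PATH, getCover and Book; none of this is part of the removed code's API.

	// updateCover regenerates the cover images of a single book.
	// Sketch only: db, BOOKS_PATH, getCover and Book come from the removed
	// update tool's package.
	func updateCover(book Book) error {
		e, err := epub.Open(BOOKS_PATH+book.Path, 0)
		if err != nil {
			return err
		}
		defer e.Close()

		cover, coverSmall := getCover(e, book.Title)
		if cover != "" {
			db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"cover": cover, "coversmall": coverSmall})
		}
		return nil
	}
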