Merge branch 'master' into user
Conflicts: database.go
This commit is contained in:
commit
5d522da0d2
22 changed files with 1254 additions and 46 deletions
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -2,8 +2,10 @@ books/
|
|||
new/
|
||||
cover/
|
||||
trantor
|
||||
.*.swp
|
||||
tools/adduser/adduser
|
||||
tools/update/update
|
||||
tools/togridfs/togridfs
|
||||
tools/getISBNnDesc/getISBNnDesc
|
||||
tools/coverNew/coverNew
|
||||
tags
|
||||
.*.swp
|
||||
|
|
|
@ -14,6 +14,7 @@ const (
|
|||
DAILY_VISITS_COLL = "visits.daily"
|
||||
MONTHLY_VISITS_COLL = "visits.monthly"
|
||||
USERS_COLL = "users"
|
||||
NEWS_COLL = "news"
|
||||
STATS_COLL = "statistics"
|
||||
FS_BOOKS = "fs_books"
|
||||
FS_IMGS = "fs_imgs"
|
||||
|
@ -28,6 +29,8 @@ const (
|
|||
TAGS_DISPLAY = 50
|
||||
SEARCH_ITEMS_PAGE = 20
|
||||
NEW_ITEMS_PAGE = 50
|
||||
NUM_NEWS = 10
|
||||
DAYS_NEWS_INDEXPAGE = 15
|
||||
|
||||
TEMPLATE_PATH = "templates/"
|
||||
CSS_PATH = "css/"
|
||||
|
|
27
cover.go
27
cover.go
|
@ -64,13 +64,18 @@ func coverHandler(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
imgId, smallId := searchCommonCoverNames(e, title)
|
||||
imgId, smallId := coverFromMetadata(e, title)
|
||||
if imgId != "" {
|
||||
return imgId, smallId
|
||||
}
|
||||
|
||||
imgId, smallId = searchCommonCoverNames(e, title)
|
||||
if imgId != "" {
|
||||
return imgId, smallId
|
||||
}
|
||||
|
||||
/* search for img on the text */
|
||||
exp, _ := regexp.Compile("<ima?g.*[(src)(href)]=[\"']([^\"']*(\\.[^\\.\"']*))[\"']")
|
||||
exp, _ := regexp.Compile("<.*ima?g.*[(src)(href)]=[\"']([^\"']*(\\.[^\\.\"']*))[\"']")
|
||||
it, errNext := e.Spine()
|
||||
for errNext == nil {
|
||||
file, err := it.Open()
|
||||
|
@ -86,7 +91,7 @@ func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
|||
res := exp.FindSubmatch(txt)
|
||||
if res != nil {
|
||||
href := string(res[1])
|
||||
urlPart := strings.Split(it.Url(), "/")
|
||||
urlPart := strings.Split(it.URL(), "/")
|
||||
url := strings.Join(urlPart[:len(urlPart)-1], "/")
|
||||
if href[:3] == "../" {
|
||||
href = href[3:]
|
||||
|
@ -113,8 +118,22 @@ func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
|||
return "", ""
|
||||
}
|
||||
|
||||
func coverFromMetadata(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
metaList, _ := e.MetadataAttr("meta")
|
||||
for _, meta := range metaList {
|
||||
if meta["name"] == "cover" {
|
||||
img, err := e.OpenFileId(meta["content"])
|
||||
if err == nil {
|
||||
defer img.Close()
|
||||
return storeImg(img, title)
|
||||
}
|
||||
}
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
func searchCommonCoverNames(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
for _, p := range []string{"cover.jpg", "Images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"} {
|
||||
for _, p := range []string{"cover.jpg", "Images/cover.jpg", "images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"} {
|
||||
img, err := e.OpenFile(p)
|
||||
if err == nil {
|
||||
defer img.Close()
|
||||
|
|
26
database.go
26
database.go
|
@ -34,10 +34,16 @@ type Book struct {
|
|||
Keywords []string
|
||||
}
|
||||
|
||||
type News struct {
|
||||
Date time.Time
|
||||
Text string
|
||||
}
|
||||
|
||||
type DB struct {
|
||||
session *mgo.Session
|
||||
books *mgo.Collection
|
||||
user *mgo.Collection
|
||||
news *mgo.Collection
|
||||
stats *mgo.Collection
|
||||
mr *MR
|
||||
}
|
||||
|
@ -53,6 +59,7 @@ func initDB() *DB {
|
|||
database := d.session.DB(DB_NAME)
|
||||
d.books = database.C(BOOKS_COLL)
|
||||
d.user = database.C(USERS_COLL)
|
||||
d.news = database.C(NEWS_COLL)
|
||||
d.stats = database.C(STATS_COLL)
|
||||
d.mr = NewMR(database)
|
||||
return d
|
||||
|
@ -94,6 +101,25 @@ func (d *DB) UserRole(user string) string {
|
|||
return res.Role
|
||||
}
|
||||
|
||||
func (d *DB) AddNews(text string) error {
|
||||
var news News
|
||||
news.Text = text
|
||||
news.Date = time.Now()
|
||||
return d.news.Insert(news)
|
||||
}
|
||||
|
||||
func (d *DB) GetNews(num int, days int) (news []News, err error) {
|
||||
query := bson.M{}
|
||||
if days != 0 {
|
||||
duration := time.Duration(-24*days) * time.Hour
|
||||
date := time.Now().Add(duration)
|
||||
query = bson.M{"date": bson.M{"$gt": date}}
|
||||
}
|
||||
q := d.news.Find(query).Sort("-date").Limit(num)
|
||||
err = q.All(&news)
|
||||
return
|
||||
}
|
||||
|
||||
func (d *DB) InsertStats(stats interface{}) error {
|
||||
return d.stats.Insert(stats)
|
||||
}
|
||||
|
|
56
news.go
Normal file
56
news.go
Normal file
|
@ -0,0 +1,56 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type newsData struct {
|
||||
S Status
|
||||
News []newsEntry
|
||||
}
|
||||
|
||||
type newsEntry struct {
|
||||
Date string
|
||||
Text string
|
||||
}
|
||||
|
||||
func newsHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
||||
var data newsData
|
||||
data.S = GetStatus(w, r)
|
||||
data.S.News = true
|
||||
data.News = getNews(NUM_NEWS, 0)
|
||||
loadTemplate(w, "news", data)
|
||||
}
|
||||
|
||||
func editNewsHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
||||
if sess.User == "" {
|
||||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
var data statusData
|
||||
data.S = GetStatus(w, r)
|
||||
data.S.News = true
|
||||
loadTemplate(w, "edit_news", data)
|
||||
}
|
||||
|
||||
func postNewsHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
||||
if sess.User == "" {
|
||||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
text := r.FormValue("text")
|
||||
db.AddNews(text)
|
||||
http.Redirect(w, r, "/news/", http.StatusFound)
|
||||
}
|
||||
|
||||
func getNews(num int, days int) []newsEntry {
|
||||
dbnews, _ := db.GetNews(num, days)
|
||||
news := make([]newsEntry, len(dbnews))
|
||||
for i, n := range dbnews {
|
||||
news[i].Text = n.Text
|
||||
news[i].Date = n.Date.Format("Jan 2, 2006")
|
||||
}
|
||||
return news
|
||||
}
|
10
reader.go
10
reader.go
|
@ -65,10 +65,10 @@ func getNextPrev(e *epubgo.Epub, file string, id string, base string) (string, s
|
|||
prev := ""
|
||||
next := ""
|
||||
for err == nil {
|
||||
if cleanLink(spine.Url()) == file {
|
||||
if cleanLink(spine.URL()) == file {
|
||||
break
|
||||
}
|
||||
prev = spine.Url()
|
||||
prev = spine.URL()
|
||||
err = spine.Next()
|
||||
}
|
||||
if err != nil {
|
||||
|
@ -79,7 +79,7 @@ func getNextPrev(e *epubgo.Epub, file string, id string, base string) (string, s
|
|||
prev = genLink(id, base, prev)
|
||||
}
|
||||
if spine.Next() == nil {
|
||||
next = genLink(id, base, spine.Url())
|
||||
next = genLink(id, base, spine.URL())
|
||||
}
|
||||
return next, prev
|
||||
}
|
||||
|
@ -106,7 +106,7 @@ func listChapters(nav *epubgo.NavigationIterator, depth int) []chapter {
|
|||
for err == nil {
|
||||
var c chapter
|
||||
c.Label = nav.Title()
|
||||
c.Link = nav.Url()
|
||||
c.Link = nav.URL()
|
||||
c.Depth = depth
|
||||
for c.Depth < depth {
|
||||
c.Out = append(c.Out, true)
|
||||
|
@ -143,7 +143,7 @@ func readStartHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
|||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
http.Redirect(w, r, "/read/"+id+"/"+it.Url(), http.StatusTemporaryRedirect)
|
||||
http.Redirect(w, r, "/read/"+id+"/"+it.URL(), http.StatusTemporaryRedirect)
|
||||
}
|
||||
|
||||
func readHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
||||
|
|
|
@ -11,6 +11,7 @@ type Status struct {
|
|||
Notif []Notification
|
||||
Home bool
|
||||
About bool
|
||||
News bool
|
||||
Upload bool
|
||||
Stats bool
|
||||
Help bool
|
||||
|
@ -30,6 +31,8 @@ var templates = template.Must(template.ParseFiles(TEMPLATE_PATH+"header.html",
|
|||
TEMPLATE_PATH+"404.html",
|
||||
TEMPLATE_PATH+"index.html",
|
||||
TEMPLATE_PATH+"about.html",
|
||||
TEMPLATE_PATH+"news.html",
|
||||
TEMPLATE_PATH+"edit_news.html",
|
||||
TEMPLATE_PATH+"book.html",
|
||||
TEMPLATE_PATH+"search.html",
|
||||
TEMPLATE_PATH+"upload.html",
|
||||
|
|
|
@ -6,4 +6,73 @@
|
|||
|
||||
<p>We like to pay the authors, but not the corporations that make profit from them. We won't listen to any content remove request from corporations, editorials, right management organizations or any other blood-suckers.<p>
|
||||
|
||||
<h4>Status</h4>
|
||||
|
||||
<p>The Imperial Library of Trantor it's in beta-status. We are working to provide a good user experience, but it's still in early development.</p>
|
||||
|
||||
<p><b>Any help is welcome</b>. You can write us comments to our email address (zenow@tormail.org), upload your epubs, download our <a href="https://gitorious.org/trantor">source code</a> hack it and send us patches, ...</p>
|
||||
|
||||
<h4>Donations</h4>
|
||||
|
||||
<p>If you feel like donate some bitcoins we'll gladly accept them. You can request one bitcoin key for you or use our public bitcoin key:</p>
|
||||
<p class="centered text-success">1JioYbSYDH4JQYbhF7tX2kGUVZc2vzvugx</p>
|
||||
|
||||
<h4>PGP</h4>
|
||||
|
||||
<p>You can use pgp to write me emails, use my key:</p>
|
||||
<pre class="pre-scrollable span10">
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
Version: GnuPG v1.4.12 (GNU/Linux)
|
||||
|
||||
mQINBFHkelgBEADG3Fh5MQTIlDI361//JoGtd+PiScLutyWAsuNICuWUwCSPSV9/
|
||||
tbYeptq6nawS/asu0Qzs6T99SqHypCq8WGbhBkHZaVPxBdnrfm7JLBfTeqCPMIm3
|
||||
KKeq2ipOg9bwUopEqYvpf85b7PnKRn1KpRj/Qz8majuihnFiLNHeRXnrYr89bWE8
|
||||
Xy45HPwngwvUFkZpE1ttBHGrDgb4VugnDiwRHyHG1MPl7k2YaZcKA5R+McaWuQZp
|
||||
MONs0/HFGR1PgRa/Kx5xCuGZe28PSgyQUF6FZbQjOR4IJoy5mqVy2OX43BggigHU
|
||||
/ISRQrYefs4jbBX6s2fT4lvgw4SSEQXPrSIw+E3xnPJk1OooniT6IKp9FtOGo2mb
|
||||
zCSiFm7Et97cpgo/FxANQr6DryEMeW+B68oZ0WrkeQZqKjXOloNwUUYizN+p7OsW
|
||||
K+k/VgRmqYerrXXqrdd73cuWjVx16c1SxiUoqV0LDoSVwlfpppS2WH3gg5xdjtAB
|
||||
7tsj3Q7OBejANYAvvE7XsWD+pg4Sy96htMAf+PVZipn03qoO9msb0MTHSzmjtNfJ
|
||||
bV19HjzKv3hzFG9qEh0g9Uc/Oyni8Q79ugwni54bYs38WjC8qQBTj0y3lkRE4JSP
|
||||
2zNaSubDvbSBOcncYh0m60b2NMIsXQoHHOMdLL1wMwLn91SBw1y25teV1QARAQAB
|
||||
tDtMYXMgWmVub3cgKEltcGVyaWFsIExpYnJhcnkgb2YgVHJhbnRvcikgPHplbm93
|
||||
QHRvcm1haWwub3JnPokCOAQTAQIAIgUCUeR6WAIbAwYLCQgHAwIGFQgCCQoLBBYC
|
||||
AwECHgECF4AACgkQRPcNrYeH/dewNA//dtATSYKwpt6itGQoZslqdaYbbxuhCWWz
|
||||
NA7VKATp1gQNhnnIJW8F/KziC39eKGGnSScS9La5Lazuj+CjV5SpXvlLFxfCr+vI
|
||||
G9PDEbgpCg1hmJTIAewVn3Xpx10F0fLmRw/DkeSdqECVeAT0WTIptHZh9M13fpa7
|
||||
urkewDaDLfQquASSkFUspAgUwJJfR0cQCO0d5XhRg2v9hz4pngrgwioDZKPa610v
|
||||
AuCSgrEgQF1D6YRfa3ePsWIHhKO1n1Kv2Qz9NNgdi5FQQOXKbttIU416VnHEnL6k
|
||||
RwtzRKE/K6KzCAtP4vwJaiJa4euGMFWO2kp608zCC11Ne6Bbszzt95QzKG9fKE80
|
||||
BwB4NFm+GZxJtk+nutF5XUvogl1+ogLyjCZxS4UNdnF2zb4hYvOoDv7GDD5I1/po
|
||||
MSp6ZetbIggWFGfMMh7Fe8HvqPLbV5H3hKQqx0zvzntROhs1a/EdfB1I0CfX75Ib
|
||||
KdxgBJ3ajFNtZc3z8m5IlOE2RAuKZ0zSVRl7cCX1D5tYsqRIGy4cvWJk5nKj5AMV
|
||||
101i/N26Fq1X/MOO5Cc2UKkLoZNv0PFMXwuoQx4iY4Y3KREz+08KYn4iM9u2d1Kq
|
||||
6icCWQ1aBQy84AQ9LQT5OwOBQltLKYaaucYciD28Fn32rDF1nfzH8m1zpNsRWTSq
|
||||
qj1kSZNJ5Xa5Ag0EUeR6WAEQALDfO5pnjJtVGAdqYaPbrf9fVnW5/RfYtLUrps8d
|
||||
UAduk6QFuCItb8OnuUDVqW08PEX5KcxA43wXLaO7Uvb3smBsWJOkOCwOgpEbTxnh
|
||||
98l6FnXLpMBkshh2jhsfWIKU3CH4qQNUYGc7MzXkMEeHZsJD5xvMauAnfarSFJLJ
|
||||
ij52D2Gm53kLcJjz/bn80qb6XhknFvQ3At5mEeUSRF73LOeTHc8pApIBn+aIUTXo
|
||||
8y5pPirr4k5aRc+cg06MNhjsvzyhzR7gT2r2QJjeuWGWA2Tl7xDudQwqZs69CMFd
|
||||
FZd9j/cOeTWwNtRZqstQ+jWBvP81T++MkTEsz9TOVZxSUcQ99qTQSGbH41KhlPDm
|
||||
mxD24xOA4RCMAKdHOELaU0wqb5DAHtJcJHEihczAaObzg8A3935vXob84UAqCcpG
|
||||
eIjegdueWkbgJECq+a0RFfiAbTkigDgvzDS45MkcCbSW9TSqzNhDFPGnrFDGlajq
|
||||
pCX8TZuzhOc5NupQo35h46z5pMCgNoPf2Jma771j2NJg73R/bjAo5wPdMlrMF0W5
|
||||
hckRTnm72Oeh87qXfwS7kgUSiXs2YLo869QAGbKw0IlMNCzDQ9t6/2IIKoCjcwLV
|
||||
c6tIYklUxz4N0S0ZQouZ0w84G+IyY2MyRKSRoky3fQLRIsNTBkWkVPl7IUhpXlPx
|
||||
9RYBABEBAAGJAh8EGAECAAkFAlHkelgCGwwACgkQRPcNrYeH/dd+QQ//WAvnIupJ
|
||||
psVkYn8CCld+3hUKohJhBQyKjQEgX04cH9jJlIBxkDqktFeUNQ/lwyG7Xu6wSSOv
|
||||
caGj+JsA9sJbvSv8ypMzxtbNEuPPvbUCQIprkkg9P46XTTUp3R5mWZ1bkxinODOd
|
||||
BcG8fezhwVG5D1x9eN9KtmoOyoXbZT6Lj1ngkLTBEu4qKXEmaBMrBtAebv1Jp+nl
|
||||
h63kH2e5JCgTAu1110Pp7oRwfv9b+LeKDyoR6BVgJbFUmDlZykg1JZkUCA3UkO5S
|
||||
xLDpTauc+dZ7iEBYE7MfHPNGYjk/1qd9us4a9ZdyFHC0FWctcPCkNBYtkEyF5I5S
|
||||
NUV3bJ5u3H+n9OTFCrZCWUuYjZ81ZK4PwvP9DWDirTG0pf2ilzi846NzN+cKQZVH
|
||||
KGW5abAAFVANWsNHaVfSdiH9zcsWru9li7Z4YalVkTBLDVg5xqJB0IB+9QkC2f8m
|
||||
eD5+TdHe3d9Ws+aTb55LY7QoV5yDK0k9VRn0jx5YU6rJPoeIz+ItRqyPNC6yGOb1
|
||||
mchroGN+i91cG0+sarVr26Xn4asasEOuPrmwH2YVBpzzN2bn1g3Z9pzb0Rmp0TpD
|
||||
Y5iwX9isk7J5yNkCl1bae0U5ztVtxB7ghXDUIVFalYvT6xn3p9t8iD3NsbML7IQ0
|
||||
IXsSLZzM+UxPlUZpEqSH0qllMrtZAR4Xvvk=
|
||||
=KPVB
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
</pre>
|
||||
|
||||
{{template "footer.html"}}
|
||||
|
|
9
templates/edit_news.html
Normal file
9
templates/edit_news.html
Normal file
|
@ -0,0 +1,9 @@
|
|||
{{template "header.html" .S}}
|
||||
|
||||
<h4>Add News:</h4>
|
||||
<form method="POST" action="/news/edit">
|
||||
<textarea class="field span10" name="text" rows="3"></textarea> <br />
|
||||
<button type="submit" class="btn">Add</button>
|
||||
</form>
|
||||
|
||||
{{template "footer.html"}}
|
|
@ -52,7 +52,8 @@
|
|||
<ul class="nav">
|
||||
<li {{if .Home}}class="active"{{end}}><a href="/">Home</a></li>
|
||||
<li {{if .About}}class="active"{{end}}><a href="/about/">About</a></li>
|
||||
<li {{if .Upload}}class="active"{{end}}><a href="/upload/">Upload your epub</a></li>
|
||||
<li {{if .News}}class="active"{{end}}><a href="/news/">News</a></li>
|
||||
<li {{if .Upload}}class="active"{{end}}><a href="/upload/">Upload</a></li>
|
||||
<li {{if .Stats}}class="active"{{end}}><a href="/stats/">Statistics</a></li>
|
||||
</ul>
|
||||
|
||||
|
@ -65,6 +66,8 @@
|
|||
</a>
|
||||
<ul class="dropdown-menu">
|
||||
<li><a href="/new/"><i class="icon-book"></i> New books</a></li>
|
||||
<li><a href="/news/edit"><i class="icon-certificate"></i> Edit news</a></li>
|
||||
<li class="divider"></li>
|
||||
<li><a href="/settings/"><i class="icon-wrench"></i> Settings</a></li>
|
||||
<li class="divider"></li>
|
||||
<li><a href="/logout/"><i class="icon-off"></i> Log Out</a></li>
|
||||
|
|
|
@ -1,5 +1,11 @@
|
|||
{{template "header.html" .S}}
|
||||
|
||||
{{range .News}}
|
||||
<div class="offset2 span8 alert alert-info">
|
||||
<a href="/news/"><strong>News!</strong></a> {{.Text}}
|
||||
</div>
|
||||
{{end}}
|
||||
|
||||
<div class="row">
|
||||
<div class="span8">
|
||||
<h4>Last books added:</h4>
|
||||
|
|
14
templates/news.html
Normal file
14
templates/news.html
Normal file
|
@ -0,0 +1,14 @@
|
|||
{{template "header.html" .S}}
|
||||
|
||||
<h4>News:</h4>
|
||||
|
||||
<dl class="dl-horizontal">
|
||||
{{range .News}}
|
||||
<div class="well well-small">
|
||||
<dt>{{.Date}}</dt>
|
||||
<dd>{{.Text}}</dd>
|
||||
</div>
|
||||
{{end}}
|
||||
</dl>
|
||||
|
||||
{{template "footer.html"}}
|
|
@ -1,4 +1,4 @@
|
|||
Some tools dirty to manage trantor:
|
||||
Some dirty tools to manage trantor:
|
||||
|
||||
- adduser. Used to add users to trantor:
|
||||
$ adduser myNick
|
||||
|
@ -9,3 +9,5 @@ Password:
|
|||
- togridfs (23/4/2013). Migrate all files and covers to gridfs
|
||||
|
||||
- getISBNnDesc (31/5/2013). Import the ISBN and the description with changes of lines to the database
|
||||
|
||||
- coverNew. Reload the cover from all the new books
|
||||
|
|
45
tools/coverNew/config.go
Normal file
45
tools/coverNew/config.go
Normal file
|
@ -0,0 +1,45 @@
|
|||
package main
|
||||
|
||||
const (
|
||||
PORT = "8080"
|
||||
|
||||
DB_IP = "127.0.0.1"
|
||||
DB_NAME = "trantor"
|
||||
META_COLL = "meta"
|
||||
BOOKS_COLL = "books"
|
||||
TAGS_COLL = "tags"
|
||||
VISITED_COLL = "visited"
|
||||
DOWNLOADED_COLL = "downloaded"
|
||||
HOURLY_VISITS_COLL = "visits.hourly"
|
||||
DAILY_VISITS_COLL = "visits.daily"
|
||||
MONTHLY_VISITS_COLL = "visits.monthly"
|
||||
USERS_COLL = "users"
|
||||
NEWS_COLL = "news"
|
||||
STATS_COLL = "statistics"
|
||||
FS_BOOKS = "fs_books"
|
||||
FS_IMGS = "fs_imgs"
|
||||
|
||||
PASS_SALT = "ImperialLibSalt"
|
||||
MINUTES_UPDATE_TAGS = 11
|
||||
MINUTES_UPDATE_VISITED = 41
|
||||
MINUTES_UPDATE_DOWNLOADED = 47
|
||||
MINUTES_UPDATE_HOURLY = 31
|
||||
MINUTES_UPDATE_DAILY = 60*12 + 7
|
||||
MINUTES_UPDATE_MONTHLY = 60*24 + 11
|
||||
TAGS_DISPLAY = 50
|
||||
SEARCH_ITEMS_PAGE = 20
|
||||
NEW_ITEMS_PAGE = 50
|
||||
NUM_NEWS = 10
|
||||
DAYS_NEWS_INDEXPAGE = 15
|
||||
|
||||
TEMPLATE_PATH = "templates/"
|
||||
CSS_PATH = "css/"
|
||||
JS_PATH = "js/"
|
||||
IMG_PATH = "img/"
|
||||
|
||||
IMG_WIDTH_BIG = 300
|
||||
IMG_WIDTH_SMALL = 60
|
||||
IMG_QUALITY = 80
|
||||
|
||||
CHAN_SIZE = 100
|
||||
)
|
204
tools/coverNew/cover.go
Normal file
204
tools/coverNew/cover.go
Normal file
|
@ -0,0 +1,204 @@
|
|||
package main
|
||||
|
||||
import _ "image/png"
|
||||
import _ "image/jpeg"
|
||||
import _ "image/gif"
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"git.gitorious.org/go-pkg/epubgo.git"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/nfnt/resize"
|
||||
"image"
|
||||
"image/jpeg"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"labix.org/v2/mgo"
|
||||
"labix.org/v2/mgo/bson"
|
||||
"log"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func coverHandler(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
if !bson.IsObjectIdHex(vars["id"]) {
|
||||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
id := bson.ObjectIdHex(vars["id"])
|
||||
books, _, err := db.GetBooks(bson.M{"_id": id})
|
||||
if err != nil || len(books) == 0 {
|
||||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
book := books[0]
|
||||
|
||||
if !book.Active {
|
||||
sess := GetSession(r)
|
||||
if sess.User == "" {
|
||||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
fs := db.GetFS(FS_IMGS)
|
||||
var f *mgo.GridFile
|
||||
if vars["size"] == "small" {
|
||||
f, err = fs.OpenId(book.CoverSmall)
|
||||
} else {
|
||||
f, err = fs.OpenId(book.Cover)
|
||||
}
|
||||
if err != nil {
|
||||
log.Println("Error while opening image:", err)
|
||||
notFound(w, r)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
headers := w.Header()
|
||||
headers["Content-Type"] = []string{"image/jpeg"}
|
||||
|
||||
io.Copy(w, f)
|
||||
}
|
||||
|
||||
func GetCover(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
imgId, smallId := coverFromMetadata(e, title)
|
||||
if imgId != "" {
|
||||
return imgId, smallId
|
||||
}
|
||||
|
||||
imgId, smallId = searchCommonCoverNames(e, title)
|
||||
if imgId != "" {
|
||||
return imgId, smallId
|
||||
}
|
||||
|
||||
/* search for img on the text */
|
||||
exp, _ := regexp.Compile("<.*ima?g.*[(src)(href)]=[\"']([^\"']*(\\.[^\\.\"']*))[\"']")
|
||||
it, errNext := e.Spine()
|
||||
for errNext == nil {
|
||||
file, err := it.Open()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
txt, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
res := exp.FindSubmatch(txt)
|
||||
if res != nil {
|
||||
href := string(res[1])
|
||||
urlPart := strings.Split(it.URL(), "/")
|
||||
url := strings.Join(urlPart[:len(urlPart)-1], "/")
|
||||
if href[:3] == "../" {
|
||||
href = href[3:]
|
||||
url = strings.Join(urlPart[:len(urlPart)-2], "/")
|
||||
}
|
||||
href = strings.Replace(href, "%20", " ", -1)
|
||||
href = strings.Replace(href, "%27", "'", -1)
|
||||
href = strings.Replace(href, "%28", "(", -1)
|
||||
href = strings.Replace(href, "%29", ")", -1)
|
||||
if url == "" {
|
||||
url = href
|
||||
} else {
|
||||
url = url + "/" + href
|
||||
}
|
||||
|
||||
img, err := e.OpenFile(url)
|
||||
if err == nil {
|
||||
defer img.Close()
|
||||
return storeImg(img, title)
|
||||
}
|
||||
}
|
||||
errNext = it.Next()
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
func coverFromMetadata(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
metaList, _ := e.MetadataAttr("meta")
|
||||
for _, meta := range metaList {
|
||||
if meta["name"] == "cover" {
|
||||
img, err := e.OpenFileId(meta["content"])
|
||||
if err == nil {
|
||||
defer img.Close()
|
||||
return storeImg(img, title)
|
||||
}
|
||||
}
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
func searchCommonCoverNames(e *epubgo.Epub, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
for _, p := range []string{"cover.jpg", "Images/cover.jpg", "images/cover.jpg", "cover.jpeg", "cover1.jpg", "cover1.jpeg"} {
|
||||
img, err := e.OpenFile(p)
|
||||
if err == nil {
|
||||
defer img.Close()
|
||||
return storeImg(img, title)
|
||||
}
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
func storeImg(img io.Reader, title string) (bson.ObjectId, bson.ObjectId) {
|
||||
/* open the files */
|
||||
fBig, err := createCoverFile(title)
|
||||
if err != nil {
|
||||
log.Println("Error creating", title, ":", err.Error())
|
||||
return "", ""
|
||||
}
|
||||
defer fBig.Close()
|
||||
|
||||
fSmall, err := createCoverFile(title + "_small")
|
||||
if err != nil {
|
||||
log.Println("Error creating", title+"_small", ":", err.Error())
|
||||
return "", ""
|
||||
}
|
||||
defer fSmall.Close()
|
||||
|
||||
/* resize img */
|
||||
var img2 bytes.Buffer
|
||||
img1 := io.TeeReader(img, &img2)
|
||||
jpgOptions := jpeg.Options{IMG_QUALITY}
|
||||
imgResized, err := resizeImg(img1, IMG_WIDTH_BIG)
|
||||
if err != nil {
|
||||
log.Println("Error resizing big image:", err.Error())
|
||||
return "", ""
|
||||
}
|
||||
err = jpeg.Encode(fBig, imgResized, &jpgOptions)
|
||||
if err != nil {
|
||||
log.Println("Error encoding big image:", err.Error())
|
||||
return "", ""
|
||||
}
|
||||
imgSmallResized, err := resizeImg(&img2, IMG_WIDTH_SMALL)
|
||||
if err != nil {
|
||||
log.Println("Error resizing small image:", err.Error())
|
||||
return "", ""
|
||||
}
|
||||
err = jpeg.Encode(fSmall, imgSmallResized, &jpgOptions)
|
||||
if err != nil {
|
||||
log.Println("Error encoding small image:", err.Error())
|
||||
return "", ""
|
||||
}
|
||||
|
||||
idBig, _ := fBig.Id().(bson.ObjectId)
|
||||
idSmall, _ := fSmall.Id().(bson.ObjectId)
|
||||
return idBig, idSmall
|
||||
}
|
||||
|
||||
func createCoverFile(title string) (*mgo.GridFile, error) {
|
||||
fs := db.GetFS(FS_IMGS)
|
||||
return fs.Create(title + ".jpg")
|
||||
}
|
||||
|
||||
func resizeImg(imgReader io.Reader, width uint) (image.Image, error) {
|
||||
img, _, err := image.Decode(imgReader)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return resize.Resize(width, 0, img, resize.NearestNeighbor), nil
|
||||
}
|
31
tools/coverNew/coverNew.go
Normal file
31
tools/coverNew/coverNew.go
Normal file
|
@ -0,0 +1,31 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"labix.org/v2/mgo/bson"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
func main() {
|
||||
db = initDB()
|
||||
defer db.Close()
|
||||
books, _, _ := db.GetNewBooks()
|
||||
|
||||
for _, book := range books {
|
||||
fmt.Println(book.Title)
|
||||
e, err := OpenBook(book.File)
|
||||
if err != nil {
|
||||
fmt.Println("================", err)
|
||||
}
|
||||
|
||||
cover, coverSmall := GetCover(e, book.Title)
|
||||
if cover != "" {
|
||||
db.UpdateBook(bson.ObjectIdHex(book.Id), bson.M{"cover": cover, "coversmall": coverSmall})
|
||||
}
|
||||
e.Close()
|
||||
}
|
||||
}
|
||||
|
||||
func notFound(w http.ResponseWriter, r *http.Request) {
|
||||
// cover.go needs this function to compile
|
||||
}
|
230
tools/coverNew/database.go
Normal file
230
tools/coverNew/database.go
Normal file
|
@ -0,0 +1,230 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"labix.org/v2/mgo"
|
||||
"labix.org/v2/mgo/bson"
|
||||
"time"
|
||||
)
|
||||
|
||||
var db *DB
|
||||
|
||||
type Book struct {
|
||||
Id string `bson:"_id"`
|
||||
Title string
|
||||
Author []string
|
||||
Contributor string
|
||||
Publisher string
|
||||
Description string
|
||||
Subject []string
|
||||
Date string
|
||||
Lang []string
|
||||
Isbn string
|
||||
Type string
|
||||
Format string
|
||||
Source string
|
||||
Relation string
|
||||
Coverage string
|
||||
Rights string
|
||||
Meta string
|
||||
File bson.ObjectId
|
||||
Cover bson.ObjectId
|
||||
CoverSmall bson.ObjectId
|
||||
Active bool
|
||||
Keywords []string
|
||||
}
|
||||
|
||||
type News struct {
|
||||
Date time.Time
|
||||
Text string
|
||||
}
|
||||
|
||||
type DB struct {
|
||||
session *mgo.Session
|
||||
books *mgo.Collection
|
||||
user *mgo.Collection
|
||||
news *mgo.Collection
|
||||
stats *mgo.Collection
|
||||
mr *MR
|
||||
}
|
||||
|
||||
func initDB() *DB {
|
||||
var err error
|
||||
d := new(DB)
|
||||
d.session, err = mgo.Dial(DB_IP)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
database := d.session.DB(DB_NAME)
|
||||
d.books = database.C(BOOKS_COLL)
|
||||
d.user = database.C(USERS_COLL)
|
||||
d.news = database.C(NEWS_COLL)
|
||||
d.stats = database.C(STATS_COLL)
|
||||
d.mr = NewMR(database)
|
||||
return d
|
||||
}
|
||||
|
||||
func (d *DB) Close() {
|
||||
d.session.Close()
|
||||
}
|
||||
|
||||
func md5Pass(pass string) []byte {
|
||||
h := md5.New()
|
||||
hash := h.Sum(([]byte)(PASS_SALT + pass))
|
||||
return hash
|
||||
}
|
||||
|
||||
func (d *DB) SetPassword(user string, pass string) error {
|
||||
hash := md5Pass(pass)
|
||||
return d.user.Update(bson.M{"user": user}, bson.M{"$set": bson.M{"pass": hash}})
|
||||
}
|
||||
|
||||
func (d *DB) UserValid(user string, pass string) bool {
|
||||
hash := md5Pass(pass)
|
||||
n, err := d.user.Find(bson.M{"user": user, "pass": hash}).Count()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return n != 0
|
||||
}
|
||||
|
||||
func (d *DB) AddNews(text string) error {
|
||||
var news News
|
||||
news.Text = text
|
||||
news.Date = time.Now()
|
||||
return d.news.Insert(news)
|
||||
}
|
||||
|
||||
func (d *DB) GetNews(num int, days int) (news []News, err error) {
|
||||
query := bson.M{}
|
||||
if days != 0 {
|
||||
duration := time.Duration(-24*days) * time.Hour
|
||||
date := time.Now().Add(duration)
|
||||
query = bson.M{"date": bson.M{"$gt": date}}
|
||||
}
|
||||
q := d.news.Find(query).Sort("-date").Limit(num)
|
||||
err = q.All(&news)
|
||||
return
|
||||
}
|
||||
|
||||
func (d *DB) InsertStats(stats interface{}) error {
|
||||
return d.stats.Insert(stats)
|
||||
}
|
||||
|
||||
func (d *DB) InsertBook(book interface{}) error {
|
||||
return d.books.Insert(book)
|
||||
}
|
||||
|
||||
func (d *DB) RemoveBook(id bson.ObjectId) error {
|
||||
return d.books.Remove(bson.M{"_id": id})
|
||||
}
|
||||
|
||||
func (d *DB) UpdateBook(id bson.ObjectId, data interface{}) error {
|
||||
return d.books.Update(bson.M{"_id": id}, bson.M{"$set": data})
|
||||
}
|
||||
|
||||
/* optional parameters: length and start index
|
||||
*
|
||||
* Returns: list of books, number found and err
|
||||
*/
|
||||
func (d *DB) GetBooks(query bson.M, r ...int) (books []Book, num int, err error) {
|
||||
var start, length int
|
||||
if len(r) > 0 {
|
||||
length = r[0]
|
||||
if len(r) > 1 {
|
||||
start = r[1]
|
||||
}
|
||||
}
|
||||
q := d.books.Find(query).Sort("-_id")
|
||||
num, err = q.Count()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if start != 0 {
|
||||
q = q.Skip(start)
|
||||
}
|
||||
if length != 0 {
|
||||
q = q.Limit(length)
|
||||
}
|
||||
|
||||
err = q.All(&books)
|
||||
for i, b := range books {
|
||||
books[i].Id = bson.ObjectId(b.Id).Hex()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
/* Get the most visited books
|
||||
*/
|
||||
func (d *DB) GetVisitedBooks(num int) (books []Book, err error) {
|
||||
bookId, err := d.mr.GetMostVisited(num, d.stats)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
books = make([]Book, num)
|
||||
for i, id := range bookId {
|
||||
d.books.Find(bson.M{"_id": id}).One(&books[i])
|
||||
books[i].Id = bson.ObjectId(books[i].Id).Hex()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
/* Get the most downloaded books
|
||||
*/
|
||||
func (d *DB) GetDownloadedBooks(num int) (books []Book, err error) {
|
||||
bookId, err := d.mr.GetMostDownloaded(num, d.stats)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
books = make([]Book, num)
|
||||
for i, id := range bookId {
|
||||
d.books.Find(bson.M{"_id": id}).One(&books[i])
|
||||
books[i].Id = bson.ObjectId(books[i].Id).Hex()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
/* optional parameters: length and start index
|
||||
*
|
||||
* Returns: list of books, number found and err
|
||||
*/
|
||||
func (d *DB) GetNewBooks(r ...int) (books []Book, num int, err error) {
|
||||
return d.GetBooks(bson.M{"$nor": []bson.M{{"active": true}}}, r...)
|
||||
}
|
||||
|
||||
func (d *DB) BookActive(id bson.ObjectId) bool {
|
||||
var book Book
|
||||
err := d.books.Find(bson.M{"_id": id}).One(&book)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return book.Active
|
||||
}
|
||||
|
||||
func (d *DB) GetFS(prefix string) *mgo.GridFS {
|
||||
return d.session.DB(DB_NAME).GridFS(prefix)
|
||||
}
|
||||
|
||||
func (d *DB) GetTags(numTags int) ([]string, error) {
|
||||
return d.mr.GetTags(numTags, d.books)
|
||||
}
|
||||
|
||||
type Visits struct {
|
||||
Date int64 "_id"
|
||||
Count int "value"
|
||||
}
|
||||
|
||||
func (d *DB) GetHourVisits(start time.Time) ([]Visits, error) {
|
||||
return d.mr.GetHourVisits(start, d.stats)
|
||||
}
|
||||
|
||||
func (d *DB) GetDayVisits(start time.Time) ([]Visits, error) {
|
||||
return d.mr.GetDayVisits(start, d.stats)
|
||||
}
|
||||
|
||||
func (d *DB) GetMonthVisits(start time.Time) ([]Visits, error) {
|
||||
return d.mr.GetMonthVisits(start, d.stats)
|
||||
}
|
266
tools/coverNew/mapreduce.go
Normal file
266
tools/coverNew/mapreduce.go
Normal file
|
@ -0,0 +1,266 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"labix.org/v2/mgo"
|
||||
"labix.org/v2/mgo/bson"
|
||||
"time"
|
||||
)
|
||||
|
||||
type MR struct {
|
||||
meta *mgo.Collection
|
||||
tags *mgo.Collection
|
||||
visited *mgo.Collection
|
||||
downloaded *mgo.Collection
|
||||
hourly_raw *mgo.Collection
|
||||
daily_raw *mgo.Collection
|
||||
monthly_raw *mgo.Collection
|
||||
hourly *mgo.Collection
|
||||
daily *mgo.Collection
|
||||
monthly *mgo.Collection
|
||||
}
|
||||
|
||||
func NewMR(database *mgo.Database) *MR {
|
||||
m := new(MR)
|
||||
m.meta = database.C(META_COLL)
|
||||
m.tags = database.C(TAGS_COLL)
|
||||
m.visited = database.C(VISITED_COLL)
|
||||
m.downloaded = database.C(DOWNLOADED_COLL)
|
||||
m.hourly_raw = database.C(HOURLY_VISITS_COLL + "_raw")
|
||||
m.daily_raw = database.C(DAILY_VISITS_COLL + "_raw")
|
||||
m.monthly_raw = database.C(MONTHLY_VISITS_COLL + "_raw")
|
||||
m.hourly = database.C(HOURLY_VISITS_COLL)
|
||||
m.daily = database.C(DAILY_VISITS_COLL)
|
||||
m.monthly = database.C(MONTHLY_VISITS_COLL)
|
||||
return m
|
||||
}
|
||||
|
||||
func (m *MR) GetTags(numTags int, booksColl *mgo.Collection) ([]string, error) {
|
||||
if m.isOutdated(TAGS_COLL, MINUTES_UPDATE_TAGS) {
|
||||
var mr mgo.MapReduce
|
||||
mr.Map = `function() {
|
||||
if (this.subject) {
|
||||
this.subject.forEach(function(s) { emit(s, 1); });
|
||||
}
|
||||
}`
|
||||
mr.Reduce = `function(tag, vals) {
|
||||
var count = 0;
|
||||
vals.forEach(function() { count += 1; });
|
||||
return count;
|
||||
}`
|
||||
err := m.update(&mr, bson.M{"active": true}, booksColl, TAGS_COLL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
var result []struct {
|
||||
Tag string "_id"
|
||||
}
|
||||
err := m.tags.Find(nil).Sort("-value").Limit(numTags).All(&result)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tags := make([]string, len(result))
|
||||
for i, r := range result {
|
||||
tags[i] = r.Tag
|
||||
}
|
||||
return tags, nil
|
||||
}
|
||||
|
||||
func (m *MR) GetMostVisited(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
|
||||
if m.isOutdated(VISITED_COLL, MINUTES_UPDATE_VISITED) {
|
||||
var mr mgo.MapReduce
|
||||
mr.Map = `function() {
|
||||
emit(this.id, 1);
|
||||
}`
|
||||
mr.Reduce = `function(tag, vals) {
|
||||
var count = 0;
|
||||
vals.forEach(function() { count += 1; });
|
||||
return count;
|
||||
}`
|
||||
err := m.update(&mr, bson.M{"section": "book"}, statsColl, VISITED_COLL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
var result []struct {
|
||||
Book bson.ObjectId "_id"
|
||||
}
|
||||
err := m.visited.Find(nil).Sort("-value").Limit(num).All(&result)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
books := make([]bson.ObjectId, len(result))
|
||||
for i, r := range result {
|
||||
books[i] = r.Book
|
||||
}
|
||||
return books, nil
|
||||
}
|
||||
|
||||
func (m *MR) GetMostDownloaded(num int, statsColl *mgo.Collection) ([]bson.ObjectId, error) {
|
||||
if m.isOutdated(DOWNLOADED_COLL, MINUTES_UPDATE_DOWNLOADED) {
|
||||
var mr mgo.MapReduce
|
||||
mr.Map = `function() {
|
||||
emit(this.id, 1);
|
||||
}`
|
||||
mr.Reduce = `function(tag, vals) {
|
||||
var count = 0;
|
||||
vals.forEach(function() { count += 1; });
|
||||
return count;
|
||||
}`
|
||||
err := m.update(&mr, bson.M{"section": "download"}, statsColl, DOWNLOADED_COLL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
var result []struct {
|
||||
Book bson.ObjectId "_id"
|
||||
}
|
||||
err := m.downloaded.Find(nil).Sort("-value").Limit(num).All(&result)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
books := make([]bson.ObjectId, len(result))
|
||||
for i, r := range result {
|
||||
books[i] = r.Book
|
||||
}
|
||||
return books, nil
|
||||
}
|
||||
|
||||
// GetHourVisits returns the number of distinct visiting sessions per
// hour since start. It runs two map-reduce stages: the first groups
// the raw stats by (hour, session) so each session counts once per
// hour, the second collapses those pairs into a per-hour total. The
// final result is cached in HOURLY_VISITS_COLL and recomputed only
// when older than MINUTES_UPDATE_HOURLY minutes.
func (m *MR) GetHourVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
	if m.isOutdated(HOURLY_VISITS_COLL, MINUTES_UPDATE_HOURLY) {
		// Shared reduce: sums the partial values emitted by either map.
		const reduce = `function(date, vals) {
			var count = 0;
			vals.forEach(function(v) { count += v; });
			return count;
		}`
		// Stage 1: key by (UTC hour, session) into the "_raw" collection.
		var mr mgo.MapReduce
		mr.Map = `function() {
			var date = Date.UTC(this.date.getUTCFullYear(),
			                    this.date.getUTCMonth(),
			                    this.date.getUTCDate(),
			                    this.date.getUTCHours());
			emit({date: date, session: this.session}, 1);
		}`
		mr.Reduce = reduce
		err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, HOURLY_VISITS_COLL+"_raw")
		if err != nil {
			return nil, err
		}
		// Stage 2: drop the session part of the key and count sessions
		// per hour.
		var mr2 mgo.MapReduce
		mr2.Map = `function() {
			emit(this['_id']['date'], 1);
		}`
		mr2.Reduce = reduce
		err = m.update(&mr2, bson.M{}, m.hourly_raw, HOURLY_VISITS_COLL)
		if err != nil {
			return nil, err
		}
	}

	var result []Visits
	err := m.hourly.Find(nil).All(&result)
	return result, err
}
|
||||
|
||||
// GetDayVisits returns the number of distinct visiting sessions per
// day since start, using the same two-stage map-reduce scheme as
// GetHourVisits but keyed by UTC day. Cached in DAILY_VISITS_COLL and
// recomputed only when older than MINUTES_UPDATE_DAILY minutes.
func (m *MR) GetDayVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
	if m.isOutdated(DAILY_VISITS_COLL, MINUTES_UPDATE_DAILY) {
		// Shared reduce: sums the partial values emitted by either map.
		const reduce = `function(date, vals) {
			var count = 0;
			vals.forEach(function(v) { count += v; });
			return count;
		}`
		// Stage 1: key by (UTC day, session) into the "_raw" collection.
		var mr mgo.MapReduce
		mr.Map = `function() {
			var date = Date.UTC(this.date.getUTCFullYear(),
			                    this.date.getUTCMonth(),
			                    this.date.getUTCDate());
			emit({date: date, session: this.session}, 1);
		}`
		mr.Reduce = reduce
		err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, DAILY_VISITS_COLL+"_raw")
		if err != nil {
			return nil, err
		}
		// Stage 2: count distinct sessions per day.
		var mr2 mgo.MapReduce
		mr2.Map = `function() {
			emit(this['_id']['date'], 1);
		}`
		mr2.Reduce = reduce
		err = m.update(&mr2, bson.M{}, m.daily_raw, DAILY_VISITS_COLL)
		if err != nil {
			return nil, err
		}
	}

	var result []Visits
	err := m.daily.Find(nil).All(&result)
	return result, err
}
|
||||
|
||||
// GetMonthVisits returns the number of distinct visiting sessions per
// month since start, using the same two-stage map-reduce scheme as
// GetHourVisits but keyed by UTC month. Cached in MONTHLY_VISITS_COLL
// and recomputed only when older than MINUTES_UPDATE_MONTHLY minutes.
func (m *MR) GetMonthVisits(start time.Time, statsColl *mgo.Collection) ([]Visits, error) {
	if m.isOutdated(MONTHLY_VISITS_COLL, MINUTES_UPDATE_MONTHLY) {
		// Shared reduce: sums the partial values emitted by either map.
		const reduce = `function(date, vals) {
			var count = 0;
			vals.forEach(function(v) { count += v; });
			return count;
		}`
		// Stage 1: key by (UTC month, session) into the "_raw" collection.
		var mr mgo.MapReduce
		mr.Map = `function() {
			var date = Date.UTC(this.date.getUTCFullYear(),
			                    this.date.getUTCMonth());
			emit({date: date, session: this.session}, 1);
		}`
		mr.Reduce = reduce
		err := m.update(&mr, bson.M{"date": bson.M{"$gte": start}}, statsColl, MONTHLY_VISITS_COLL+"_raw")
		if err != nil {
			return nil, err
		}
		// Stage 2: count distinct sessions per month.
		var mr2 mgo.MapReduce
		mr2.Map = `function() {
			emit(this['_id']['date'], 1);
		}`
		mr2.Reduce = reduce
		err = m.update(&mr2, bson.M{}, m.monthly_raw, MONTHLY_VISITS_COLL)
		if err != nil {
			return nil, err
		}
	}

	var result []Visits
	err := m.monthly.Find(nil).All(&result)
	return result, err
}
|
||||
|
||||
// update re-runs a map-reduce into storeColl. It removes the previous
// meta marker, replaces storeColl with the map-reduce output, and
// finally inserts a fresh meta entry; the creation timestamp embedded
// in that entry's ObjectId is what isOutdated later consults.
func (m *MR) update(mr *mgo.MapReduce, query bson.M, queryColl *mgo.Collection, storeColl string) error {
	_, err := m.meta.RemoveAll(bson.M{"type": storeColl})
	if err != nil {
		return err
	}

	// "replace" drops the old contents of storeColl atomically.
	mr.Out = bson.M{"replace": storeColl}
	_, err = queryColl.Find(query).MapReduce(mr, nil)
	if err != nil {
		return err
	}

	return m.meta.Insert(bson.M{"type": storeColl})
}
|
||||
|
||||
func (m *MR) isOutdated(coll string, minutes float64) bool {
|
||||
var result struct {
|
||||
Id bson.ObjectId `bson:"_id"`
|
||||
}
|
||||
err := m.meta.Find(bson.M{"type": coll}).One(&result)
|
||||
if err != nil {
|
||||
return true
|
||||
}
|
||||
|
||||
lastUpdate := result.Id.Time()
|
||||
return time.Since(lastUpdate).Minutes() > minutes
|
||||
}
|
76
tools/coverNew/session.go
Normal file
76
tools/coverNew/session.go
Normal file
|
@ -0,0 +1,76 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"github.com/gorilla/securecookie"
|
||||
"github.com/gorilla/sessions"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
var sesStore = sessions.NewCookieStore(securecookie.GenerateRandomKey(64))
|
||||
|
||||
// Notification is a one-shot message for the user, rebuilt from
// session flashes by getNotif and queued by Session.Notify.
type Notification struct {
	Title string
	Msg   string
	Type  string /* error, info or success */
}
|
||||
|
||||
// Session wraps the underlying cookie session, exposing the logged-in
// user name and any notifications pending for this request.
type Session struct {
	User  string
	Notif []Notification
	S     *sessions.Session
}
|
||||
|
||||
// getNotif drains the notification flashes ("nMsg", "nTitle", "nType")
// from the session and rebuilds them as Notification values.
// NOTE(review): this indexes titles[i] and tpes[i] by the msgs index,
// so it assumes the three flash lists always have equal length — true
// as long as Notify is the only writer; confirm nothing else flashes
// these keys.
func getNotif(session *sessions.Session) []Notification {
	msgs := session.Flashes("nMsg")
	titles := session.Flashes("nTitle")
	tpes := session.Flashes("nType")
	notif := make([]Notification, len(msgs))
	for i, m := range msgs {
		msg, _ := m.(string)
		title, _ := titles[i].(string)
		tpe, _ := tpes[i].(string)
		notif[i] = Notification{title, msg, tpe}
	}
	return notif
}
|
||||
|
||||
// GetSession decodes the "session" cookie from the request. For an
// existing session it loads the user name and the pending
// notifications; for a brand-new one it assigns a random hex id.
// NOTE(review): s.S is dereferenced even when err != nil — this relies
// on the cookie store returning a usable new session alongside a
// decode error; confirm for the vendored gorilla/sessions version.
func GetSession(r *http.Request) (s *Session) {
	s = new(Session)
	var err error
	s.S, err = sesStore.Get(r, "session")
	if err == nil && !s.S.IsNew {
		s.User, _ = s.S.Values["user"].(string)
		s.Notif = getNotif(s.S)
	}

	if s.S.IsNew {
		// 16 random bytes -> 32 hex chars identifying this session.
		s.S.Values["id"] = hex.EncodeToString(securecookie.GenerateRandomKey(16))
	}

	return
}
|
||||
|
||||
func (s *Session) LogIn(user string) {
|
||||
s.User = user
|
||||
s.S.Values["user"] = user
|
||||
}
|
||||
|
||||
// LogOut clears the logged-in user. The key is overwritten with the
// empty string rather than deleted, so the cookie is rewritten on the
// next Save.
func (s *Session) LogOut() {
	s.S.Values["user"] = ""
}
|
||||
|
||||
// Notify queues a one-shot notification for the user as three parallel
// flash lists; getNotif reassembles them on the next request. tpe is
// expected to be one of the Notification types (error, info, success).
func (s *Session) Notify(title, msg, tpe string) {
	s.S.AddFlash(msg, "nMsg")
	s.S.AddFlash(title, "nTitle")
	s.S.AddFlash(tpe, "nType")
}
|
||||
|
||||
// Save writes the session (values and pending flashes) back to the
// response cookie.
// NOTE(review): the error from sesStore.Save is silently dropped — a
// failed save loses notifications/login state without trace; consider
// at least logging it.
func (s *Session) Save(w http.ResponseWriter, r *http.Request) {
	sesStore.Save(r, w, s.S)
}
|
||||
|
||||
func (s *Session) Id() string {
|
||||
id, _ := s.S.Values["id"].(string)
|
||||
return id
|
||||
}
|
128
tools/coverNew/store.go
Normal file
128
tools/coverNew/store.go
Normal file
|
@ -0,0 +1,128 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"git.gitorious.org/go-pkg/epubgo.git"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"labix.org/v2/mgo/bson"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func OpenBook(id bson.ObjectId) (*epubgo.Epub, error) {
|
||||
fs := db.GetFS(FS_BOOKS)
|
||||
f, err := fs.OpenId(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
buff, err := ioutil.ReadAll(f)
|
||||
reader := bytes.NewReader(buff)
|
||||
|
||||
return epubgo.Load(reader, int64(len(buff)))
|
||||
}
|
||||
|
||||
// StoreNewFile streams file into the books GridFS under the given
// name and returns the new file's ObjectId. On a copy error the id of
// the (possibly partial) GridFS file is still returned alongside the
// error.
func StoreNewFile(name string, file io.Reader) (bson.ObjectId, error) {
	fs := db.GetFS(FS_BOOKS)
	fw, err := fs.Create(name)
	if err != nil {
		return "", err
	}
	defer fw.Close()

	_, err = io.Copy(fw, file)
	id, _ := fw.Id().(bson.ObjectId)
	return id, err
}
|
||||
|
||||
// DeleteFile removes the epub with the given id from the books GridFS.
func DeleteFile(id bson.ObjectId) error {
	fs := db.GetFS(FS_BOOKS)
	return fs.RemoveId(id)
}
|
||||
|
||||
// DeleteCover removes the cover image with the given id from the
// images GridFS.
func DeleteCover(id bson.ObjectId) error {
	fs := db.GetFS(FS_IMGS)
	return fs.RemoveId(id)
}
|
||||
|
||||
// DeleteBook removes a book's stored artifacts: both cover sizes (when
// present) and the epub file itself. Errors from the individual
// deletions are ignored, making this best-effort cleanup.
func DeleteBook(book Book) {
	if book.Cover != "" {
		DeleteCover(book.Cover)
	}
	if book.CoverSmall != "" {
		DeleteCover(book.CoverSmall)
	}
	DeleteFile(book.File)
}
|
||||
|
||||
// cleanStr normalizes a metadata string: it decodes the apostrophe
// entity, strips any remaining HTML entities, and trims trailing
// spaces and commas.
func cleanStr(str string) string {
	// Decode "&#39;" (HTML entity for the apostrophe) before the
	// generic entity-stripping pass below removes it entirely. The
	// previous version replaced "'" with itself — a no-op, apparently
	// the entity lost to HTML rendering of the source.
	str = strings.Replace(str, "&#39;", "'", -1)
	exp, _ := regexp.Compile("&[^;]*;")
	str = exp.ReplaceAllString(str, "")
	exp, _ = regexp.Compile("[ ,]*$")
	str = exp.ReplaceAllString(str, "")
	return str
}
|
||||
|
||||
// parseAuthr normalizes epub dc:creator entries into author names.
// Entries whose whole text ends in a parenthesized name
// ("Display (Real Name)") keep only the parenthesized part (exp1);
// entries of the form "role: Name" keep only the part after the colon
// (exp2); anything else is passed through. Every result goes through
// cleanStr.
func parseAuthr(creator []string) []string {
	exp1, _ := regexp.Compile("^(.*\\( *([^\\)]*) *\\))*$")
	exp2, _ := regexp.Compile("^[^:]*: *(.*)$")
	res := make([]string, len(creator))
	for i, s := range creator {
		auth := exp1.FindStringSubmatch(s)
		if auth != nil {
			// auth[2:] holds the innermost parenthesized capture(s).
			res[i] = cleanStr(strings.Join(auth[2:], ", "))
		} else {
			auth := exp2.FindStringSubmatch(s)
			if auth != nil {
				res[i] = cleanStr(auth[1])
			} else {
				res[i] = cleanStr(s)
			}
		}
	}
	return res
}
|
||||
|
||||
func parseDescription(description []string) string {
|
||||
str := cleanStr(strings.Join(description, "\n"))
|
||||
str = strings.Replace(str, "</p>", "\n", -1)
|
||||
exp, _ := regexp.Compile("<[^>]*>")
|
||||
str = exp.ReplaceAllString(str, "")
|
||||
str = strings.Replace(str, "&", "&", -1)
|
||||
str = strings.Replace(str, "<", "<", -1)
|
||||
str = strings.Replace(str, ">", ">", -1)
|
||||
str = strings.Replace(str, "\\n", "\n", -1)
|
||||
return str
|
||||
}
|
||||
|
||||
// parseSubject splits every epub subject entry on " / " and returns
// the flattened list of individual tags.
func parseSubject(subject []string) []string {
	var tags []string
	for _, entry := range subject {
		parts := strings.Split(entry, " / ")
		tags = append(tags, parts...)
	}
	return tags
}
|
||||
|
||||
// parseDate returns the first date entry with every "Unspecified: "
// marker removed, or the empty string when no date is present.
func parseDate(date []string) string {
	if len(date) > 0 {
		return strings.Replace(date[0], "Unspecified: ", "", -1)
	}
	return ""
}
|
||||
|
||||
// keywords assembles the search keywords for a book record: the title
// words, each author's name words, the publisher words and the subject
// tags. Missing or mistyped fields contribute their zero value.
func keywords(b map[string]interface{}) []string {
	title, _ := b["title"].(string)
	words := strings.Split(title, " ")

	authors, _ := b["author"].([]string)
	for _, name := range authors {
		words = append(words, strings.Split(name, " ")...)
	}

	publisher, _ := b["publisher"].(string)
	words = append(words, strings.Split(publisher, " ")...)

	subjects, _ := b["subject"].([]string)
	return append(words, subjects...)
}
|
|
@ -121,6 +121,7 @@ type indexData struct {
|
|||
DownloadedBooks []Book
|
||||
Count int
|
||||
Tags []string
|
||||
News []newsEntry
|
||||
}
|
||||
|
||||
func indexHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
||||
|
@ -132,6 +133,7 @@ func indexHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
|||
data.Books, data.Count, _ = db.GetBooks(bson.M{"active": true}, 6)
|
||||
data.VisitedBooks, _ = db.GetVisitedBooks(6)
|
||||
data.DownloadedBooks, _ = db.GetDownloadedBooks(6)
|
||||
data.News = getNews(1, DAYS_NEWS_INDEXPAGE)
|
||||
loadTemplate(w, "index", data)
|
||||
}
|
||||
|
||||
|
@ -181,6 +183,9 @@ func setUpRouter() {
|
|||
r.HandleFunc("/cover/{id:[0-9a-fA-F]+}/{size}/{img:.*}", coverHandler)
|
||||
r.HandleFunc("/settings/", GatherStats(settingsHandler))
|
||||
r.HandleFunc("/stats/", GatherStats(statsHandler))
|
||||
r.HandleFunc("/news/", GatherStats(newsHandler))
|
||||
r.HandleFunc("/news/edit", GatherStats(editNewsHandler)).Methods("GET")
|
||||
r.HandleFunc("/news/edit", GatherStats(postNewsHandler)).Methods("POST")
|
||||
h := http.FileServer(http.Dir(IMG_PATH))
|
||||
r.Handle("/img/{img}", http.StripPrefix("/img/", h))
|
||||
h = http.FileServer(http.Dir(CSS_PATH))
|
||||
|
|
79
upload.go
79
upload.go
|
@ -18,51 +18,62 @@ func InitUpload() {
|
|||
var uploadChannel chan uploadRequest
|
||||
|
||||
// uploadRequest carries one opened uploaded epub through
// uploadChannel to the background worker. The diff residue in the
// source still showed the pre-merge `epubs []*multipart.FileHeader`
// field next to these; only the post-merge fields are kept, matching
// the `uploadRequest{file, f.Filename}` construction in the handler.
type uploadRequest struct {
	file     multipart.File
	filename string
}
|
||||
|
||||
func uploadWorker() {
|
||||
for req := range uploadChannel {
|
||||
for _, f := range req.epubs {
|
||||
file, err := f.Open()
|
||||
if err != nil {
|
||||
log.Println("Can not open uploaded file", f.Filename, ":", err)
|
||||
continue
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
epub, err := openMultipartEpub(file)
|
||||
if err != nil {
|
||||
log.Println("Not valid epub uploaded file", f.Filename, ":", err)
|
||||
continue
|
||||
}
|
||||
defer epub.Close()
|
||||
|
||||
book := parseFile(epub)
|
||||
title, _ := book["title"].(string)
|
||||
file.Seek(0, 0)
|
||||
id, err := StoreNewFile(title+".epub", file)
|
||||
if err != nil {
|
||||
log.Println("Error storing book (", title, "):", err)
|
||||
continue
|
||||
}
|
||||
|
||||
book["file"] = id
|
||||
db.InsertBook(book)
|
||||
log.Println("File uploaded:", f.Filename)
|
||||
}
|
||||
processFile(req)
|
||||
}
|
||||
}
|
||||
|
||||
// processFile validates, stores and indexes one uploaded epub.
// Failures are only logged: by the time this runs asynchronously in
// the worker, the uploader's HTTP request has already been answered.
func processFile(req uploadRequest) {
	defer req.file.Close()

	epub, err := openMultipartEpub(req.file)
	if err != nil {
		log.Println("Not valid epub uploaded file", req.filename, ":", err)
		return
	}
	defer epub.Close()

	book := parseFile(epub)
	title, _ := book["title"].(string)
	// Rewind: opening/parsing the epub consumed the stream.
	req.file.Seek(0, 0)
	id, err := StoreNewFile(title+".epub", req.file)
	if err != nil {
		log.Println("Error storing book (", title, "):", err)
		return
	}

	book["file"] = id
	db.InsertBook(book)
	log.Println("File uploaded:", req.filename)
}
|
||||
|
||||
func uploadPostHandler(w http.ResponseWriter, r *http.Request, sess *Session) {
|
||||
problem := false
|
||||
|
||||
r.ParseMultipartForm(20000000)
|
||||
filesForm := r.MultipartForm.File["epub"]
|
||||
uploadChannel <- uploadRequest{filesForm}
|
||||
for _, f := range filesForm {
|
||||
file, err := f.Open()
|
||||
if err != nil {
|
||||
log.Println("Can not open uploaded file", f.Filename, ":", err)
|
||||
sess.Notify("Upload problem!", "There was a problem with book "+f.Filename, "error")
|
||||
problem = true
|
||||
continue
|
||||
}
|
||||
uploadChannel <- uploadRequest{file, f.Filename}
|
||||
}
|
||||
|
||||
if len(filesForm) > 0 {
|
||||
sess.Notify("Upload successful!", "Thank you for your contribution", "success")
|
||||
} else {
|
||||
sess.Notify("Upload problem!", "No books where uploaded.", "error")
|
||||
if !problem {
|
||||
if len(filesForm) > 0 {
|
||||
sess.Notify("Upload successful!", "Thank you for your contribution", "success")
|
||||
} else {
|
||||
sess.Notify("Upload problem!", "No books where uploaded.", "error")
|
||||
}
|
||||
}
|
||||
uploadHandler(w, r, sess)
|
||||
}
|
||||
|
|
Reference in a new issue