Normalize contents of webroot variables

Alex Cabal 2022-07-09 12:03:37 -05:00
parent 7c087e0e13
commit 4300619383
5 changed files with 92 additions and 95 deletions


@@ -190,6 +190,32 @@ class Library{
 		return $matches;
 	}
 
+	/**
+	 * @return array<Ebook>
+	 */
+	public static function GetEbooksFromFilesystem(?string $webRoot = WEB_ROOT): array{
+		$ebooks = [];
+
+		$contentFiles = explode("\n", trim(shell_exec('find ' . escapeshellarg($webRoot . '/ebooks/') . ' -name "content.opf" | sort') ?? ''));
+
+		foreach($contentFiles as $path){
+			if($path == '')
+				continue;
+
+			$ebookWwwFilesystemPath = '';
+
+			try{
+				$ebookWwwFilesystemPath = preg_replace('|/content\.opf|ius', '', $path);
+				$ebooks[] = new Ebook($ebookWwwFilesystemPath);
+			}
+			catch(\Exception $ex){
+				// An error in a book isn't fatal; just carry on.
+			}
+		}
+
+		return $ebooks;
+	}
+
 	public static function RebuildCache(): void{
 		// We check a lockfile because this can be a long-running command.
 		// We don't want to queue up a bunch of these in case someone is refreshing the index constantly.
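For reference, a minimal sketch of how this new helper might be called from a standalone script. It assumes, as the scripts below suggest, that Core.php defines the WEB_ROOT constant and autoloads the Library and Ebook classes; the /var/www/html path is just the example web root used elsewhere in this commit's docs.

<?
require_once('/standardebooks.org/web/lib/Core.php');

// Enumerate every ebook under a given web root; omitting the argument
// falls back to the WEB_ROOT default in the signature above.
$ebooks = Library::GetEbooksFromFilesystem('/var/www/html');

foreach($ebooks as $ebook){
	// Identifier is used elsewhere in this commit as a unique key.
	print($ebook->Identifier . "\n");
}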


@@ -6,7 +6,7 @@ To use, call this script with the directory where your ebooks go as its last arg
 To use, call this script with the directories of the books you want to deploy as its arguments. For example, to deploy all ebooks after using sync-ebooks, run `deploy-ebook-to-www /standardebooks.org/ebooks/*`. To deploy only The Time Machine by H. G. Wells, you would run `deploy-ebook-to-www /standardebooks.org/ebooks/h-g-wells_the-time-machine`. To output progress information, use `-v` or `--verbose`.
 
-The default web root is `/standardebooks.org`. If it is located elsewhere, specify it with the `--webroot` option. For instance, `deploy-ebook-to-www --webroot /var/www/html /path/to/ebook`. Note that there will be PHP errors if the Git repositories are not in the ebook directory immediately in the web root. Either keep them there or create a symlink.
+The default web root is `/standardebooks.org/web/www`. If it is located elsewhere, specify it with the `--webroot` option. For instance, `deploy-ebook-to-www --webroot /var/www/html /path/to/ebook`. Note that there will be PHP errors if the Git repositories are not in the ebook directory immediately in the web root. Either keep them there or create a symlink.
 
 The default group is `se`. To use a different one, specify it with the `--group` option.


@@ -9,7 +9,7 @@ USAGE
 	deploy-ebook-to-www [-v,--verbose] [-g,--group GROUP] [--webroot WEBROOT] [--weburl WEBURL] [--no-images] [--no-build] [--no-epubcheck] [--no-recompose] [--no-feeds] [-l,--last-push-hash HASH] DIRECTORY [DIRECTORY...]
 
 	DIRECTORY is a bare source repository.
 	GROUP is a groupname. Defaults to "se".
-	WEBROOT is the path to your webroot. Defaults to "/standardebooks.org".
+	WEBROOT is the path to your webroot. Defaults to "/standardebooks.org/web/www".
 	WEBURL is the URL the website is served on. Defaults to "https://standardebooks.org".
 
 	The deploy process does four things:
@@ -45,7 +45,7 @@ verbose="false"
 images="true"
 build="true"
 group="se"
-webRoot="/standardebooks.org/web"
+webRoot="/standardebooks.org/web/www"
 webUrl="https://standardebooks.org"
 lastPushHash=""
 epubcheck="true"
@@ -134,7 +134,7 @@ if ! [ -f "${scriptsDir}"/generate-feeds ]; then
 	die "\"${scriptsDir}\"/generate-feeds\" is not a file or could not be found."
 fi
 
-mkdir -p "${webRoot}"/www/images/covers/
+mkdir -p "${webRoot}"/images/covers/
 
 for dir in "$@"
 do
@@ -176,7 +176,7 @@ do
 	if [ "${images}" = "true" ]; then
 		# Always build images if they don't exist, or if they've changed
-		if [[ ! -f "${webRoot}/www/images/covers/${urlSafeIdentifier}-cover.jpg" ]] || [[ "${diff}" =~ diff\ --git\ a/images/cover.jpg ]] || [[ "${diff}" =~ diff\ --git\ a/images/cover.svg ]]; then
+		if [[ ! -f "${webRoot}/images/covers/${urlSafeIdentifier}-cover.jpg" ]] || [[ "${diff}" =~ diff\ --git\ a/images/cover.jpg ]] || [[ "${diff}" =~ diff\ --git\ a/images/cover.svg ]]; then
 			images="true"
 		else
 			images="false"
@@ -202,7 +202,7 @@ do
 	workDir=$(mktemp -d)
 	imgWorkDir=$(mktemp -d)
-	webDir="${webRoot}/www/ebooks/${webDir}"
+	webDir="${webRoot}/ebooks/${webDir}"
 
 	if [ "${images}" = "true" ]; then
 		if [ "${verbose}" = "true" ]; then
@@ -296,7 +296,7 @@ do
 		# Recompose the epub into a single file, but put it outside of the epub src for now so we don't stomp on it with the following sections.
 		# We do this first because the tweaks below shouldn't apply to the single-page file
-		se recompose-epub --xhtml --output "${workDir}"/single-page.xhtml --extra-css-file="${webRoot}/www/css/web.css" "${workDir}"
+		se recompose-epub --xhtml --output "${workDir}"/single-page.xhtml --extra-css-file="${webRoot}/css/web.css" "${workDir}"
 
 		# Add a navbar with a link back to the homepage
 		sed --in-place --regexp-extended "s|<body(.*?)>|<body\1><header><nav><ul><li><a href=\"/\">Standard Ebooks</a></li><li><a href=\"${bookUrl}\">Back to ebook</a></li></ul></nav></header>|" "${workDir}"/single-page.xhtml
@@ -357,7 +357,7 @@ do
 	if [ "${images}" = "true" ]; then
 		# Move the cover images over
-		mv "${imgWorkDir}/${urlSafeIdentifier}"*.{jpg,avif} "${webRoot}/www/images/covers/"
+		mv "${imgWorkDir}/${urlSafeIdentifier}"*.{jpg,avif} "${webRoot}/images/covers/"
 	fi
 
 	# Delete the now-empty work dir (empty except for .git)
@@ -366,8 +366,8 @@ do
 	sudo chgrp --preserve-root --recursive "${group}" "${webDir}"
 	sudo chmod --preserve-root --recursive g+ws "${webDir}"
-	sudo chgrp --preserve-root --recursive "${group}" "${webRoot}/www/images/covers/"
-	sudo chmod --preserve-root --recursive g+ws "${webRoot}/www/images/covers/"
+	sudo chgrp --preserve-root --recursive "${group}" "${webRoot}/images/covers/"
+	sudo chmod --preserve-root --recursive g+ws "${webRoot}/images/covers/"
 
 	if [ "${verbose}" = "true" ]; then
 		printf "Rebuilding web library cache ... "
@@ -390,9 +390,9 @@ if [ "${feeds}" = "true" ]; then
 	"${scriptsDir}/generate-feeds" --webroot "${webRoot}" --weburl "${webUrl}"
 
-	sudo chown --recursive se:committers "${webRoot}"/www/{atom,rss,opds}/{*.xml,subjects}
-	sudo chmod --recursive 664 "${webRoot}"/www/{atom,rss,opds}/{*.xml,subjects/*.xml}
-	sudo chmod 775 "${webRoot}"/www/{atom,rss,opds}/subjects
+	sudo chown --recursive se:committers "${webRoot}"/{atom,rss,opds}/{*.xml,subjects}
+	sudo chmod --recursive 664 "${webRoot}"/{atom,rss,opds}/{*.xml,subjects/*.xml}
+	sudo chmod 775 "${webRoot}"/{atom,rss,opds}/subjects
 
 	if [ "${verbose}" = "true" ]; then
 		printf "Done.\n"


@@ -20,59 +20,42 @@ function SaveFeed($feed, $force, $now = null){
 	}
 }
 
-$longopts = ['webroot:', 'weburl:', 'force'];
+$longopts = ['webroot:', 'force'];
 $options = getopt('', $longopts);
-$webRoot = $options['webroot'] ?? '/standardebooks.org/web';
-$webUrl = $options['weburl'] ?? 'https://standardebooks.org';
+$webRoot = $options['webroot'] ?? WEB_ROOT;
 $force = isset($options['force']) ? true : false; // If the arg is present, getopts sets it to false!!!
 
-$contentFiles = explode("\n", trim(shell_exec('find ' . escapeshellarg($webRoot . '/www/ebooks/') . ' -name "content.opf" | sort') ?? ''));
-
 $allEbooks = [];
 $newestEbooks = [];
 $subjects = [];
 $ebooksBySubject = [];
 $ebooksPerNewestEbooksFeed = 30;
 
-if(!is_dir(WEB_ROOT . '/feeds/opds/subjects')){
-	mkdir(WEB_ROOT . '/feeds/opds/subjects');
+if(!is_dir($webRoot . '/feeds/opds/subjects')){
+	mkdir($webRoot . '/feeds/opds/subjects');
 }
 
-if(!is_dir(WEB_ROOT . '/feeds/rss/subjects')){
-	mkdir(WEB_ROOT . '/feeds/rss/subjects');
+if(!is_dir($webRoot . '/feeds/rss/subjects')){
+	mkdir($webRoot . '/feeds/rss/subjects');
 }
 
-if(!is_dir(WEB_ROOT . '/feeds/atom/subjects')){
-	mkdir(WEB_ROOT . '/feeds/atom/subjects');
+if(!is_dir($webRoot . '/feeds/atom/subjects')){
+	mkdir($webRoot . '/feeds/atom/subjects');
 }
 
 // Iterate over all ebooks to build the various feeds
-foreach($contentFiles as $path){
-	if($path == '')
-		continue;
-
-	$ebookWwwFilesystemPath = '';
-
-	try{
-		$ebookWwwFilesystemPath = preg_replace('|/content\.opf|ius', '', $path);
-		$ebook = new Ebook($ebookWwwFilesystemPath);
-
-		$allEbooks[$ebook->Updated->format('Y-m-d\TH:i:s\Z') . ' ' . $ebook->Identifier] = $ebook;
-		$newestEbooks[$ebook->Created->format('Y-m-d\TH:i:s\Z') . ' ' . $ebook->Identifier] = $ebook;
-
-		foreach($ebook->Tags as $tag){
-			// Add the book's subjects to the main subjects list
-			if(!in_array($tag->Name, $subjects)){
-				$subjects[] = $tag->Name;
-			}
-
-			// Sort this ebook by subject
-			$ebooksBySubject[$tag->Name][$ebook->Created->format('Y-m-d\TH:i:s\Z') . ' ' . $ebook->Identifier] = $ebook;
-		}
-	}
-	catch(\Exception $ex){
-		print('Failed to generate OPDS entry for `' . $ebookWwwFilesystemPath . '`. Exception: ' . $ex->getMessage());
-		continue;
-	}
+foreach(Library::GetEbooksFromFilesystem($webRoot) as $ebook){
+	$allEbooks[$ebook->Updated->format('Y-m-d\TH:i:s\Z') . ' ' . $ebook->Identifier] = $ebook;
+	$newestEbooks[$ebook->Created->format('Y-m-d\TH:i:s\Z') . ' ' . $ebook->Identifier] = $ebook;
+
+	foreach($ebook->Tags as $tag){
+		// Add the book's subjects to the main subjects list
+		if(!in_array($tag->Name, $subjects)){
+			$subjects[] = $tag->Name;
+		}
+
+		// Sort this ebook by subject
+		$ebooksBySubject[$tag->Name][$ebook->Created->format('Y-m-d\TH:i:s\Z') . ' ' . $ebook->Identifier] = $ebook;
+	}
 }
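One idiom in this loop is worth spelling out: each ebook is keyed by an ISO-8601-style timestamp followed by its identifier, so the later krsort() calls on those string keys yield a newest-first ordering, with the identifier as a tiebreaker. A self-contained illustration with made-up keys:

<?
// Lexicographic order on zero-padded timestamps matches chronological order,
// so krsort() (reverse sort by key) yields newest-first.
$ebooks = [];
$ebooks['2022-07-01T00:00:00Z url:an-older-book'] = 'An Older Book';
$ebooks['2022-07-09T12:00:00Z url:a-newer-book'] = 'A Newer Book';

krsort($ebooks);

print(implode(', ', $ebooks) . "\n"); // A Newer Book, An Older Book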
@@ -107,7 +90,7 @@ $opdsRootEntries = [
 		'acquisition')
 ];
 
-$opdsRoot = new OpdsNavigationFeed('Standard Ebooks', 'The Standard Ebooks catalog.', '/feeds/opds', WEB_ROOT . '/feeds/opds/index.xml', $opdsRootEntries, null);
+$opdsRoot = new OpdsNavigationFeed('Standard Ebooks', 'The Standard Ebooks catalog.', '/feeds/opds', $webRoot . '/feeds/opds/index.xml', $opdsRootEntries, null);
 SaveFeed($opdsRoot, $force, $now);
 
 // Create the subjects navigation document
@@ -116,56 +99,56 @@ $subjectNavigationEntries = [];
 foreach($subjects as $subject){
 	$subjectNavigationEntries[] = new OpdsNavigationEntry($subject, 'Standard Ebooks tagged with “' . strtolower($subject) . ',” most-recently-released first.', '/feeds/opds/subjects/' . Formatter::MakeUrlSafe($subject), $now, 'subsection', 'navigation');
 }
 
-$subjectsFeed = new OpdsNavigationFeed('Standard Ebooks by Subject', 'Browse Standard Ebooks by subject.', '/feeds/opds/subjects', WEB_ROOT . '/feeds/opds/subjects/index.xml', $subjectNavigationEntries, $opdsRoot);
+$subjectsFeed = new OpdsNavigationFeed('Standard Ebooks by Subject', 'Browse Standard Ebooks by subject.', '/feeds/opds/subjects', $webRoot . '/feeds/opds/subjects/index.xml', $subjectNavigationEntries, $opdsRoot);
 $subjectsFeed->Subtitle = 'Browse Standard Ebooks by subject.';
 SaveFeed($subjectsFeed, $force, $now);
 
 // Now generate each individual subject feed
 foreach($subjectNavigationEntries as $subjectNavigationEntry){
 	krsort($ebooksBySubject[$subjectNavigationEntry->Title]);
-	$subjectFeed = new OpdsAcquisitionFeed($subjectNavigationEntry->Title . ' Ebooks', $subjectNavigationEntry->Description, '/feeds/opds/subjects/' . Formatter::MakeUrlSafe($subjectNavigationEntry->Title), WEB_ROOT . '/feeds/opds/subjects/' . Formatter::MakeUrlSafe($subjectNavigationEntry->Title) . '.xml', $ebooksBySubject[$subjectNavigationEntry->Title], $subjectsFeed);
+	$subjectFeed = new OpdsAcquisitionFeed($subjectNavigationEntry->Title . ' Ebooks', $subjectNavigationEntry->Description, '/feeds/opds/subjects/' . Formatter::MakeUrlSafe($subjectNavigationEntry->Title), $webRoot . '/feeds/opds/subjects/' . Formatter::MakeUrlSafe($subjectNavigationEntry->Title) . '.xml', $ebooksBySubject[$subjectNavigationEntry->Title], $subjectsFeed);
 	SaveFeed($subjectFeed, $force, $now);
 }
 
 // Create the 'all' feed
 krsort($allEbooks);
-$allFeed = new OpdsAcquisitionFeed('All Standard Ebooks', 'All Standard Ebooks, most-recently-updated first. This is a Complete Acquisition Feed as defined in OPDS 1.2 §2.5.', '/feeds/opds/all', WEB_ROOT . '/feeds/opds/all.xml', $allEbooks, $opdsRoot, true);
+$allFeed = new OpdsAcquisitionFeed('All Standard Ebooks', 'All Standard Ebooks, most-recently-updated first. This is a Complete Acquisition Feed as defined in OPDS 1.2 §2.5.', '/feeds/opds/all', $webRoot . '/feeds/opds/all.xml', $allEbooks, $opdsRoot, true);
 SaveFeed($allFeed, $force, $now);
 
 // Create the 'newest' feed
-$newestFeed = new OpdsAcquisitionFeed('Newest Standard Ebooks', 'The ' . number_format($ebooksPerNewestEbooksFeed) . ' latest Standard Ebooks, most-recently-released first.', '/feeds/opds/new-releases', WEB_ROOT . '/feeds/opds/new-releases.xml', $newestEbooks, $opdsRoot);
+$newestFeed = new OpdsAcquisitionFeed('Newest Standard Ebooks', 'The ' . number_format($ebooksPerNewestEbooksFeed) . ' latest Standard Ebooks, most-recently-released first.', '/feeds/opds/new-releases', $webRoot . '/feeds/opds/new-releases.xml', $newestEbooks, $opdsRoot);
 SaveFeed($newestFeed, $force, $now);
 
 // Now create RSS feeds
 // Create the 'newest' feed
-$newestRssFeed = new RssFeed('Standard Ebooks - Newest Ebooks', 'The ' . number_format($ebooksPerNewestEbooksFeed) . ' latest Standard Ebooks, most-recently-released first.', '/feeds/rss/new-releases', WEB_ROOT . '/feeds/rss/new-releases.xml', $newestEbooks);
+$newestRssFeed = new RssFeed('Standard Ebooks - Newest Ebooks', 'The ' . number_format($ebooksPerNewestEbooksFeed) . ' latest Standard Ebooks, most-recently-released first.', '/feeds/rss/new-releases', $webRoot . '/feeds/rss/new-releases.xml', $newestEbooks);
 SaveFeed($newestRssFeed, $force);
 
 // Create the 'all' feed
-$allRssFeed = new RssFeed('Standard Ebooks - All Ebooks', 'All Standard Ebooks, most-recently-released first.', '/feeds/rss/all', WEB_ROOT . '/feeds/rss/all.xml', $allEbooks);
+$allRssFeed = new RssFeed('Standard Ebooks - All Ebooks', 'All Standard Ebooks, most-recently-released first.', '/feeds/rss/all', $webRoot . '/feeds/rss/all.xml', $allEbooks);
 SaveFeed($allRssFeed, $force);
 
 // Generate each individual subject feed
 foreach($ebooksBySubject as $subject => $ebooks){
 	krsort($ebooks);
-	$subjectRssFeed = new RssFeed('Standard Ebooks - ' . (string)$subject . ' Ebooks', 'Standard Ebooks tagged with “' . strtolower($subject) . ',” most-recently-released first.', '/feeds/rss/subjects/' . Formatter::MakeUrlSafe((string)$subject), WEB_ROOT . '/feeds/rss/subjects/' . Formatter::MakeUrlSafe((string)$subject) . '.xml', $ebooks);
+	$subjectRssFeed = new RssFeed('Standard Ebooks - ' . (string)$subject . ' Ebooks', 'Standard Ebooks tagged with “' . strtolower($subject) . ',” most-recently-released first.', '/feeds/rss/subjects/' . Formatter::MakeUrlSafe((string)$subject), $webRoot . '/feeds/rss/subjects/' . Formatter::MakeUrlSafe((string)$subject) . '.xml', $ebooks);
 	SaveFeed($subjectRssFeed, $force);
 }
 
 // Now create the Atom feeds
 // Create the 'newest' feed
-$newestAtomFeed = new AtomFeed('Standard Ebooks - Newest Ebooks', 'The ' . number_format($ebooksPerNewestEbooksFeed) . ' latest Standard Ebooks, most-recently-released first.', '/feeds/atom/new-releases', WEB_ROOT . '/feeds/atom/new-releases.xml', $newestEbooks);
+$newestAtomFeed = new AtomFeed('Standard Ebooks - Newest Ebooks', 'The ' . number_format($ebooksPerNewestEbooksFeed) . ' latest Standard Ebooks, most-recently-released first.', '/feeds/atom/new-releases', $webRoot . '/feeds/atom/new-releases.xml', $newestEbooks);
 SaveFeed($newestAtomFeed, $force, $now);
 
 // Create the 'all' feed
-$allAtomFeed = new AtomFeed('Standard Ebooks - All Ebooks', 'All Standard Ebooks, most-recently-released first.', '/feeds/atom/all', WEB_ROOT . '/feeds/atom/all.xml', $allEbooks);
+$allAtomFeed = new AtomFeed('Standard Ebooks - All Ebooks', 'All Standard Ebooks, most-recently-released first.', '/feeds/atom/all', $webRoot . '/feeds/atom/all.xml', $allEbooks);
 SaveFeed($allAtomFeed, $force, $now);
 
 // Generate each individual subject feed
 foreach($ebooksBySubject as $subject => $ebooks){
 	krsort($ebooks);
-	$subjectAtomFeed = new AtomFeed('Standard Ebooks - ' . (string)$subject . ' Ebooks', 'Standard Ebooks tagged with “' . strtolower($subject) . ',” most-recently-released first.', '/feeds/atom/subjects/' . Formatter::MakeUrlSafe((string)$subject), WEB_ROOT . '/feeds/atom/subjects/' . Formatter::MakeUrlSafe((string)$subject) . '.xml', $ebooks);
+	$subjectAtomFeed = new AtomFeed('Standard Ebooks - ' . (string)$subject . ' Ebooks', 'Standard Ebooks tagged with “' . strtolower($subject) . ',” most-recently-released first.', '/feeds/atom/subjects/' . Formatter::MakeUrlSafe((string)$subject), $webRoot . '/feeds/atom/subjects/' . Formatter::MakeUrlSafe((string)$subject) . '.xml', $ebooks);
 	SaveFeed($subjectAtomFeed, $force, $now);
 }
 ?>
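The `!!!` comment above flags a real getopt() quirk: for a long option declared without a value (like force), PHP sets the matching array entry to false when the flag is passed, so a truthiness check would always fail and isset() is the correct presence test. A standalone sketch:

<?
// Invoked as: php test.php --force
$options = getopt('', ['force']);

var_export($options);                 // array ( 'force' => false, ) — present, but false!
print("\n");
var_export(isset($options['force'])); // true — the reliable presence test
print("\n");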


@@ -2,49 +2,32 @@
 <?
 require_once('/standardebooks.org/web/lib/Core.php');
 
-$longopts = ['webroot:', 'weburl:'];
+$longopts = ['webroot:'];
 $options = getopt('', $longopts);
-$webRoot = $options['webroot'] ?? '/standardebooks.org/web';
-$webUrl = $options['weburl'] ?? 'https://standardebooks.org';
-
-$contentFiles = explode("\n", trim(shell_exec('find ' . escapeshellarg($webRoot . '/www/ebooks/') . ' -name "content.opf" | sort') ?? ''));
+$webRoot = $options['webroot'] ?? WEB_ROOT;
 
 $ebooksByMonth = [];
 $lastUpdatedTimestamps = [];
 
 // Iterate over all ebooks and arrange them by publication month
-foreach($contentFiles as $path){
-	if($path == '')
-		continue;
-
-	$ebookWwwFilesystemPath = '';
-
-	try{
-		$ebookWwwFilesystemPath = preg_replace('|/content\.opf|ius', '', $path);
-		$ebook = new Ebook($ebookWwwFilesystemPath);
-		$timestamp = $ebook->Created->format('Y-m');
-		$updatedTimestamp = $ebook->Updated->getTimestamp();
-
-		if(!isset($ebooksByMonth[$timestamp])){
-			$ebooksByMonth[$timestamp] = [];
-			$lastUpdatedTimestamps[$timestamp] = $updatedTimestamp;
-		}
-
-		$ebooksByMonth[$timestamp][] = $ebook;
-		if($updatedTimestamp > $lastUpdatedTimestamps[$timestamp]){
-			$lastUpdatedTimestamps[$timestamp] = $updatedTimestamp;
-		}
-	}
-	catch(\Exception $ex){
-		print('Failed to generate download for `' . $ebookWwwFilesystemPath . '`. Exception: ' . $ex->getMessage());
-		continue;
-	}
+foreach(Library::GetEbooksFromFilesystem($webRoot) as $ebook){
+	$timestamp = $ebook->Created->format('Y-m');
+	$updatedTimestamp = $ebook->Updated->getTimestamp();
+
+	if(!isset($ebooksByMonth[$timestamp])){
+		$ebooksByMonth[$timestamp] = [];
+		$lastUpdatedTimestamps[$timestamp] = $updatedTimestamp;
+	}
+
+	$ebooksByMonth[$timestamp][] = $ebook;
+
+	if($updatedTimestamp > $lastUpdatedTimestamps[$timestamp]){
+		$lastUpdatedTimestamps[$timestamp] = $updatedTimestamp;
+	}
 }
 
 foreach($ebooksByMonth as $month => $ebooks){
 	$filename = 'se-ebooks-' . $month . '.zip';
-	$filePath = $webRoot . '/www/patrons-circle/downloads/' . $filename;
+	$filePath = $webRoot . '/patrons-circle/downloads/' . $filename;
 
 	// If the file doesn't exist, or if the content.opf last updated time is newer than the file modification time
 	if(!file_exists($filePath) || filemtime($filePath) < $lastUpdatedTimestamps[$month]){
@@ -61,28 +44,33 @@ foreach($ebooksByMonth as $month => $ebooks){
 		foreach($ebooks as $ebook){
 			if($ebook->EpubUrl !== null){
-				$ebookFilePath = $webRoot . '/www' . $ebook->EpubUrl;
+				$ebookFilePath = $webRoot . '/' . $ebook->EpubUrl;
 				$zip->addFile($ebookFilePath, $ebook->UrlSafeIdentifier . '/' . basename($ebookFilePath));
 			}
 
 			if($ebook->Azw3Url !== null){
-				$ebookFilePath = $webRoot . '/www' . $ebook->Azw3Url;
+				$ebookFilePath = $webRoot . '/' . $ebook->Azw3Url;
 				$zip->addFile($ebookFilePath, $ebook->UrlSafeIdentifier . '/' . basename($ebookFilePath));
 			}
 
 			if($ebook->KepubUrl !== null){
-				$ebookFilePath = $webRoot . '/www' . $ebook->KepubUrl;
+				$ebookFilePath = $webRoot . '/' . $ebook->KepubUrl;
 				$zip->addFile($ebookFilePath, $ebook->UrlSafeIdentifier . '/' . basename($ebookFilePath));
 			}
 
 			if($ebook->AdvancedEpubUrl !== null){
-				$ebookFilePath = $webRoot . '/www' . $ebook->AdvancedEpubUrl;
+				$ebookFilePath = $webRoot . '/' . $ebook->AdvancedEpubUrl;
 				$zip->addFile($ebookFilePath, $ebook->UrlSafeIdentifier . '/' . basename($ebookFilePath));
 			}
 
 			if($ebook->TextSinglePageUrl !== null){
-				$ebookFilePath = $webRoot . '/www' . $ebook->TextSinglePageUrl . '.xhtml';
-				$zip->addFile($ebookFilePath, $ebook->UrlSafeIdentifier . '/' . str_replace('single-page', $ebook->UrlSafeIdentifier, basename($ebookFilePath)));
+				$ebookFilePath = $webRoot . '/' . $ebook->TextSinglePageUrl . '.xhtml';
+
+				// Strip the navigation header that was added as part of the deploy process
+				$xhtml = file_get_contents($ebookFilePath);
+				$xhtml = preg_replace('|<body><header><nav>.+?</nav></header>|ius', '<body>', $xhtml);
+
+				$zip->addFromString($ebook->UrlSafeIdentifier . '/' . str_replace('single-page', $ebook->UrlSafeIdentifier, basename($ebookFilePath)), $xhtml);
 			}
 		}
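For clarity, here is the navigation-stripping replacement above run against a hypothetical single-page fragment; the header is the one the deploy script's sed command injects, and the body content is made up:

<?
$xhtml = '<body><header><nav><ul><li><a href="/">Standard Ebooks</a></li><li><a href="https://standardebooks.org/ebooks/some-book">Back to ebook</a></li></ul></nav></header><section>…</section></body>';

// Same pattern as above: a non-greedy match on the injected header, with the
// `s` modifier so `.` can cross newlines in real files.
$xhtml = preg_replace('|<body><header><nav>.+?</nav></header>|ius', '<body>', $xhtml);

print($xhtml . "\n"); // <body><section>…</section></body>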