#!/bin/bash
#
# Deploy one or more Standard Ebooks ebook repositories to the web site:
# build the ebook distributables, generate cover thumbnail/hero images,
# and rebuild the OPDS catalog and new-releases RSS feed.
#
# Usage: deploy-ebook-to-www [OPTIONS] DIRECTORY [DIRECTORY...]

# Print usage to stderr and exit with an error.
# NOTE(review): the original body was `fmt <&2`, which makes fmt read its
# *input* from stderr and prints nothing useful; the usage text was
# evidently lost. Reconstructed from the options parsed below — confirm
# wording against the canonical script.
usage(){
	fmt 1>&2 <<EOF
USAGE
	deploy-ebook-to-www [-v,--verbose] [-g,--group GROUP] [--webroot WEBROOT] [--weburl WEBURL] [-l,--last-push-hash HASH] [--no-images] [--no-epubcheck] DIRECTORY [DIRECTORY...]
		Deploy a Standard Ebooks ebook repository to the web.
EOF
	exit 1
}

# Print an error message to stderr and exit non-zero.
# NOTE(review): `die` was called throughout but never defined in this file;
# without it every error path would fail with "die: command not found".
die(){
	printf "Error: %s\n" "${1}" 1>&2
	exit 1
}

# Assert that the command named in $1 is installed, otherwise die with an
# optional installation suggestion in $2.
require(){
	command -v "$1" > /dev/null 2>&1 || {
		suggestion=""
		if [ -n "$2" ]; then
			suggestion=" $2"
		fi
		die "$1 is not installed.${suggestion}"
	}
}
# End boilerplate

verbose="false"
images="true"		# Generate cover thumbnail/hero images
build="true"		# Build the ebook files
group="se"		# Unix group that owns the deployed files
webRoot="/standardebooks.org/web"
webUrl="https://standardebooks.org"
lastPushHash=""		# If set, only rebuild what changed since this commit
epubcheck="true"	# Run epubcheck during `se build`

if [ $# -eq 0 ]; then
	usage
fi

while [ $# -gt 0 ]; do
	case "$1" in
		-h|--help)
			usage
			;;
		-v|--verbose)
			verbose="true"
			shift 1
			;;
		-g|--group)
			[ -n "$2" ] || die "Group can’t be empty."
			group="$2"
			shift 2
			;;
		--webroot)
			[ -n "$2" ] || die "Web root can’t be empty."
			webRoot="$2"
			shift 2
			;;
		--weburl)
			[ -n "$2" ] || die "Web URL can’t be empty."
			webUrl="$2"
			shift 2
			;;
		-l|--last-push-hash)
			[ -n "$2" ] || die "Last commit hash can’t be empty."
			lastPushHash="$2"
			shift 2
			;;
		--no-images)
			images="false"
			shift 1
			;;
		--no-epubcheck)
			epubcheck="false"
			shift 1
			;;
		*)
			# First non-option argument: everything that remains is a repo directory.
			break
			;;
	esac
done

if ! [ "$(getent group "${group}")" ]; then
	die "Group ${group} does not exist. Either use --group or create the group."
fi

if ! [ -d "${webRoot}" ]; then
	die "${webRoot} does not exist or is not a directory."
fi

# Check for dependencies
require "convert" "Try: apt-get install imagemagick"
require "git" "Try: apt-get install git"
require "php" "Try: apt-get install php-cli"
require "se" "Read: https://standardebooks.org/tools"

# go-avif is compiled from https://github.com/Kagami/go-avif/
scriptsDir="$(dirname "$(readlink -f "$0")")"

if ! [ -x "${scriptsDir}"/reset-php-fpm-opcache ]; then
	die "\"${scriptsDir}/reset-php-fpm-opcache\" is not an executable file."
fi

if ! [ -f "${scriptsDir}"/generate-opds.php ]; then
	die "\"${scriptsDir}/generate-opds.php\" is not a file or could not be found."
fi

if ! [ -f "${scriptsDir}"/generate-rss.php ]; then
	die "\"${scriptsDir}/generate-rss.php\" is not a file or could not be found."
fi

mkdir -p "${webRoot}"/www/images/covers/

for dir in "$@"
do
	if [ "${dir}" = "" ]; then
		continue
	fi

	repoDir=$(realpath "${dir%/}")
	baseName=$(basename "${repoDir}")

	if [ "${baseName}" = ".git" ]; then
		continue
	fi

	if [ ! -d "${repoDir}" ]; then
		die "Invalid repo root: ${repoDir}"
	fi

	if ! pushd "${repoDir}" > /dev/null; then
		echo "Couldn't cd into ${repoDir}. Skipping."
		continue
	fi

	if [ "${verbose}" = "true" ]; then
		printf "Entering %s\n" "${repoDir}"
	fi

	# Draft ebooks carry the placeholder release date; skip them.
	if git show HEAD:src/epub/content.opf | grep --quiet --extended-regexp "1900-01-01T00:00:00Z"; then
		printf "Looks like a draft ebook, skipping\n"
		continue
	fi

	if [ "${lastPushHash}" != "" ]; then
		# We were passed the hash of the last push before this one.
		# Check to see if the cover image changed, to decide if we want to rebuild the cover image thumbnail/hero
		diff=$(git diff "${lastPushHash}" HEAD)

		if [[ "${diff}" =~ diff\ --git\ a/images/cover.jpg ]] || [[ "${diff}" =~ diff\ --git\ a/images/cover.svg ]]; then
			images="true"
		else
			images="false"
		fi

		# Check to see if the actual ebook changed, to decide if we want to build
		if [[ "${diff}" =~ diff\ --git\ a/src/ ]]; then
			build="true"
		else
			build="false"
		fi
	fi

	# Derive the web path for this ebook from its dc:identifier URL in content.opf.
	webDir=$(git show HEAD:src/epub/content.opf | grep --only-matching --extended-regexp "url:https://standardebooks.org/ebooks/[^<]+<\/dc:identifier>" | sed --regexp-extended "s/<[^>]+?>//g" | sed --regexp-extended "s/^url:https:\/\/standardebooks.org\/ebooks\/?//")

	if [ "${webDir}" = "" ]; then
		die "Empty webdir!"
	fi

	workDir=$(mktemp -d)
	imgWorkDir=$(mktemp -d)
	webDir="${webRoot}/www/ebooks/${webDir}"

	if [ "${images}" = "true" ]; then
		if [ "${verbose}" = "true" ]; then
			printf "Generating cover image for web ... "
		fi

		# Flatten the ebook's URL path into a filesystem-safe identifier, e.g.
		# "jane-austen_emma" from "jane-austen/emma".
		urlSafeIdentifier=$(git show HEAD:src/epub/content.opf | grep --only-matching --extended-regexp "url:https://standardebooks.org/ebooks/[^<]+<\/dc:identifier>" | sed --regexp-extended "s/<[^>]+?>//g" | sed --regexp-extended "s|url:https://standardebooks.org/ebooks/||g" | sed --regexp-extended "s|/|_|g")

		git show HEAD:images/cover.jpg > "${imgWorkDir}/${urlSafeIdentifier}.jpg"
		git show HEAD:images/cover.svg > "${imgWorkDir}/${urlSafeIdentifier}.svg"

		# We have to cd into the work dir, otherwise convert won't pick up the relative path of the jpg background in cover.svg
		pushd "${imgWorkDir}" > /dev/null || exit

		# Build the hero image for individual ebook pages
		# Resize and crop the image to 2156 width, 720 height, and starting at the coords 0,1078
		convert -resize "1318" -crop "1318x439+0+659" -sampling-factor 4:2:0 -strip -quality 80 -colorspace RGB -interlace JPEG "${imgWorkDir}/${urlSafeIdentifier}.jpg" "${imgWorkDir}/${urlSafeIdentifier}-hero.jpg"
		convert -resize "2636" -crop "2636x860+0+1318" -sampling-factor 4:2:0 -strip -quality 80 -colorspace RGB -interlace JPEG "${imgWorkDir}/${urlSafeIdentifier}.jpg" "${imgWorkDir}/${urlSafeIdentifier}-hero@2x.jpg"

		"${scriptsDir}"/go-avif -e "${imgWorkDir}/${urlSafeIdentifier}-hero.jpg" -o "${imgWorkDir}/${urlSafeIdentifier}-hero.avif" --best
		"${scriptsDir}"/go-avif -e "${imgWorkDir}/${urlSafeIdentifier}-hero@2x.jpg" -o "${imgWorkDir}/${urlSafeIdentifier}-hero@2x.avif" --best

		# Build the cover image thumbnail
		# We use JPG instead of SVG for several reasons:
		# 1. A JPG is roughly 1/2 the file size of the same SVG, because the SVG must contain the JPG in base64 encoding
		# 2. The "scale up" effect on mouse hover is blurry when used on SVGs.

		# Point the SVG's embedded background image at the file we just extracted.
		sed -i "s/cover\.jpg/${urlSafeIdentifier}\.jpg/g" "${imgWorkDir}/${urlSafeIdentifier}.svg"

		# Resize and compress the image (formula from Google Page Speed Insights)
		convert -resize "242" -sampling-factor 4:2:0 -strip -quality 80 -colorspace RGB -interlace JPEG "${imgWorkDir}/${urlSafeIdentifier}.svg" "${imgWorkDir}/${urlSafeIdentifier}-cover.jpg"
		convert -resize "484" -sampling-factor 4:2:0 -strip -quality 80 -colorspace RGB -interlace JPEG "${imgWorkDir}/${urlSafeIdentifier}.svg" "${imgWorkDir}/${urlSafeIdentifier}-cover@2x.jpg"

		# go-avif is extremely slow and should be replaced with something faster ASAP
		"${scriptsDir}"/go-avif -e "${imgWorkDir}/${urlSafeIdentifier}-cover.jpg" -o "${imgWorkDir}/${urlSafeIdentifier}-cover.avif" --best
		"${scriptsDir}"/go-avif -e "${imgWorkDir}/${urlSafeIdentifier}-cover@2x.jpg" -o "${imgWorkDir}/${urlSafeIdentifier}-cover@2x.avif" --best

		sudo chgrp --preserve-root --recursive "${group}" "${imgWorkDir}/${urlSafeIdentifier}"*
		sudo chmod --preserve-root --recursive g+w "${imgWorkDir}/${urlSafeIdentifier}"*

		# Remove unused images so we can copy the rest over with a glob
		rm "${imgWorkDir}/${urlSafeIdentifier}".{jpg,svg}

		if [ "${verbose}" = "true" ]; then
			printf "Done.\n"
		fi

		popd > /dev/null || die "Couldn't pop directory."
	fi

	if [ "${build}" = "true" ]; then
		if [ "${verbose}" = "true" ]; then
			printf "Building ebook ... "
		fi

		git clone --quiet "${repoDir}" "${workDir}"

		mkdir "${workDir}/dist"

		# Build the ebook
		if [ "${epubcheck}" = "true" ]; then
			if ! se build --output-dir="${workDir}/dist" --check --kindle --kobo --covers "${workDir}"; then
				rm --preserve-root --recursive --force "${workDir}"
				die "Error building ebook, stopping deployment."
			fi
		else
			if ! se build --output-dir="${workDir}/dist" --kindle --kobo --covers "${workDir}"; then
				rm --preserve-root --recursive --force "${workDir}"
				die "Error building ebook, stopping deployment."
			fi
		fi

		if [ "${verbose}" = "true" ]; then
			printf "Done.\n"
		fi

		# Get the last commit date so that we can update the modified timestamp in
		# deployed content.opf. generate-opds uses this timestamp in its output.
		modifiedDate=$(TZ=UTC git log --date=iso-strict-local -1 --pretty=tformat:"%cd" --abbrev-commit | sed "s/+00:00/Z/")
		# NOTE(review): the pattern here read `s/.+?<\/meta>/…/` — its opening
		# <meta> anchor had been stripped, which would clobber every line of
		# content.opf containing </meta>. Reconstructed to target only the
		# dcterms:modified element — confirm against the deployed content.opf schema.
		sed --in-place --regexp-extended "s/<meta property=\"dcterms:modified\">[^<]+<\/meta>/<meta property=\"dcterms:modified\">${modifiedDate}<\/meta>/" "${workDir}/src/epub/content.opf"

		# Delete the contents of the old webdir
		rm --preserve-root --recursive --force "${webDir}"

		# Re-create the webdir
		mkdir -p "${webDir}"

		# Move contents of the work dir over
		mv "${workDir}"/* "${webDir}/"
	fi

	if [ "${images}" = "true" ]; then
		# Move the cover images over
		mv "${imgWorkDir}/${urlSafeIdentifier}"*.{jpg,avif} "${webRoot}/www/images/covers/"
	fi

	# Delete the now-empty work dir (empty except for .git)
	rm --preserve-root --recursive --force "${workDir}" "${imgWorkDir}"

	sudo chgrp --preserve-root --recursive "${group}" "${webDir}"
	sudo chmod --preserve-root --recursive g+ws "${webDir}"

	sudo chgrp --preserve-root --recursive "${group}" "${webRoot}/www/images/covers/"
	sudo chmod --preserve-root --recursive g+ws "${webRoot}/www/images/covers/"

	if [ "${verbose}" = "true" ]; then
		printf "Rebuilding web library cache ... "
	fi

	"${scriptsDir}"/rebuild-library-cache

	if [ "${verbose}" = "true" ]; then
		printf "Done.\n"
	fi

	popd > /dev/null || die "Couldn't pop directory."
done

# Build the OPDS catalog
if [ "${verbose}" = "true" ]; then
	printf "Rebuilding OPDS catalog ... "
fi

php "${scriptsDir}/generate-opds.php" --webroot "${webRoot}" --weburl "${webUrl}"

# NOTE(review): these paths and the se:committers owner are hard-coded rather
# than derived from ${webRoot}/${group} — presumably intentional for the
# production host; confirm before reusing this script elsewhere.
sudo chown --recursive se:committers /standardebooks.org/web/www/opds/*
sudo chmod --recursive 664 /standardebooks.org/web/www/opds/*.xml
sudo chmod --recursive 664 /standardebooks.org/web/www/opds/*/*.xml
sudo chown --recursive se:committers /standardebooks.org/web/www/rss/*
sudo chmod --recursive 664 /standardebooks.org/web/www/rss/*.xml
sudo chmod 775 /standardebooks.org/web/www/opds/subjects

if [ "${verbose}" = "true" ]; then
	printf "Done.\n"
fi

# Build the new releases RSS feed
if [ "${verbose}" = "true" ]; then
	printf "Rebuilding new releases RSS feed ... "
fi

bash -c "php \"${scriptsDir}\"/generate-rss.php --webroot \"${webRoot}\" --weburl \"${webUrl}\" > \"${webRoot}\"/www/rss/new-releases.xml"

if [ "${verbose}" = "true" ]; then
	printf "Done.\n"
fi