Add configuration to stop rude bot crawlers using fail2ban

Alex Cabal 2025-01-18 17:33:00 -06:00
parent 8f2df6ac12
commit 5b0d37824b
3 changed files with 9 additions and 1 deletion


@@ -61,7 +61,7 @@ for filename in ${directory}/${filenameBase}.*; do
 	# ipv6loganon is provided by the `ipv6calc` package
 	grep --extended-regexp "\[${grepString}" "${filename}" | ipv6loganon --anonymize-paranoid >> "${directory}/${logMonth}/${logFilename}"
-	gzip --best "${directory}/${logMonth}/${logFilename}"
+	gzip --force --best "${directory}/${logMonth}/${logFilename}"
 	chown --preserve-root --recursive www-data:adm "${directory}/${logMonth}"
 	chmod --preserve-root --recursive g+w "${directory}/${logMonth}"
@@ -70,3 +70,6 @@ for filename in ${directory}/${filenameBase}.*; do
 		rm "${filename}"
 	fi
 done
+
+# Set this for `fail2ban` to use.
+ln -s "$1" "${directory}"/current.log
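
Rotated filenames change on every run, so the symlink gives `fail2ban` a fixed path to tail. A minimal jail sketch showing how that symlink gets consumed, assuming a jail named `se` and a hypothetical log directory (neither the jail settings nor the real value of `${directory}` appear in this commit):

# Hypothetical jail, e.g. /etc/fail2ban/jail.d/se.conf. The path and jail
# name are assumptions; only the 24-hour ban is stated by this commit.
[se]
enabled = true
filter = se
# Watch the stable symlink created by the rotation script above.
logpath = /var/log/nginx/current.log
# A single request to the honeypot is enough to ban.
maxretry = 1
# 24 hours, in seconds.
bantime = 86400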


@@ -119,6 +119,8 @@ if(!$isXslt){
 			</div>
 		<? } ?>
 		<a href="/">Standard Ebooks</a>
+		<? /* This link is hidden from regular users, and is also disallowed by `robots.txt`. If a rude bot crawls this URL, `fail2ban` bans the IP for 24 hours. See `./config/fail2ban/filter.d/se.conf`. */ ?>
+		<a href="/honeypot" hidden="hidden">Following this link will ban your IP for 24 hours</a>
 		<nav>
 			<ul>
 				<li>
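
The filter referenced by the comment is not part of this diff. A sketch of what `./config/fail2ban/filter.d/se.conf` might contain, assuming a standard combined access-log format where the client IP leads the line:

# Hypothetical contents; the real filter ships with the repo but is not
# shown in this commit.
[Definition]
# <HOST> is fail2ban's placeholder for the client IP. Any request line
# that asks for /honeypot counts as a match.
failregex = ^<HOST> .* "(GET|POST|HEAD) /honeypot
ignoreregex =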


@@ -1 +1,4 @@
 Sitemap: https://standardebooks.org/sitemap
+
+User-agent: *
+Disallow: /honeypot
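
Well-behaved crawlers read this rule and never request `/honeypot`, and human visitors never see the hidden link, so any client that fetches the URL has ignored both signals. One way to verify the trap end to end, assuming the jail is named `se` (run the request from an expendable IP):

curl --head https://standardebooks.org/honeypot
sudo fail2ban-client status se

`fail2ban-client status se` lists the jail's currently banned IPs, which should now include the test client.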