First step in protecting against crawlers

This commit is contained in:
Matthias Johnson 2026-03-05 21:27:59 -07:00
parent 796428cbda
commit 51d09b08d5
2 changed files with 10 additions and 0 deletions

View file

@ -0,0 +1,2 @@
# Fail2Ban filter for Caddy's JSON-structured access log (read via journald).
# <HOST> is fail2ban's capture tag for the offending IP address.
# NOTE(review): this pattern matches EVERY log line carrying a "remote_ip"
# field, not just abusive requests — the paired jail's maxretry/findtime
# therefore act as a plain request-rate limit. Confirm that is the intent.
# NOTE(review): the leading/trailing .* are redundant (failregex is searched
# within the line), kept as-is to avoid any behavioral change.
[Definition]
failregex = .*"remote_ip":"<HOST>".*

View file

@ -14,3 +14,11 @@ filter = caddy-auth
maxretry = 40
findtime = 10m
bantime = 1h
# Jail: throttle aggressive crawlers hitting the Caddy container.
# Works with the caddy-crawler filter, which matches every logged request,
# so this jail bans any client exceeding the request-rate threshold below.
[caddy-crawler]
enabled = true
# only read journal entries emitted by the caddy container
journalmatch = CONTAINER_NAME=caddy
filter = caddy-crawler
# ban an IP after more than 200 matched requests within 1 minute
# NOTE(review): confirm 200 req/min is low enough to catch crawlers without
# tripping on busy legitimate clients behind shared NAT.
maxretry = 200
findtime = 1m
bantime = 6h