first step in protecting against crawlers
This commit is contained in:
parent
796428cbda
commit
51d09b08d5
2 changed files with 10 additions and 0 deletions
2
roles/fail2ban/files/filter.d/caddy-crawler.conf
Normal file
2
roles/fail2ban/files/filter.d/caddy-crawler.conf
Normal file
|
|
@@ -0,0 +1,2 @@
|
||||||
|
[Definition]
|
||||||
|
failregex = .*"remote_ip":"<HOST>".*
|
||||||
|
|
@@ -14,3 +14,11 @@ filter = caddy-auth
|
||||||
maxretry = 40
|
maxretry = 40
|
||||||
findtime = 10m
|
findtime = 10m
|
||||||
bantime = 1h
|
bantime = 1h
|
||||||
|
|
||||||
|
[caddy-crawler]
|
||||||
|
enabled = true
|
||||||
|
journalmatch = CONTAINER_NAME=caddy
|
||||||
|
filter = caddy-crawler
|
||||||
|
maxretry = 200
|
||||||
|
findtime = 1m
|
||||||
|
bantime = 6h
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue