Drop spiders, we don't need them anymore, and add a grafana config
@@ -1,16 +1,12 @@
-# Define networks to be used later below
-networks:
-  # Networks will get included from snippets
-

 challenges:
-  # Challenges will get included from snippets
+  dnsbl:
+    runtime: dnsbl
+    parameters:
+      dnsbl-host: "dnsbl.dronebl.org"
+      dnsbl-decay: 1h
+      dnsbl-timeout: 1s

 conditions:
-  # Conditions will get replaced on rules AST when found as ($condition-name)
-
-  # Conditions will get included from snippets
-
-
   is-static-asset:
     - 'path == "/apple-touch-icon.png"'
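For context (not part of the commit): DroneBL is queried like any DNSBL, by reversing the client's IPv4 octets under the configured zone. The annotated copy below is an illustration only; the example client IP is hypothetical, and the meanings of dnsbl-decay and dnsbl-timeout are inferred from the parameter names rather than stated anywhere in this diff.

challenges:
  dnsbl:
    runtime: dnsbl
    parameters:
      dnsbl-host: "dnsbl.dronebl.org"   # queried as <reversed-ip>.dnsbl.dronebl.org, e.g. 7.113.0.203.dnsbl.dronebl.org for a client at 203.0.113.7
      dnsbl-decay: 1h                   # assumed: how long a verdict is cached before the lookup is repeated
      dnsbl-timeout: 1s                 # assumed: how long to wait for the DNS reply before treating the check as failed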
@@ -51,25 +47,13 @@ rules:
       - 'path.startsWith("/hls/")'
     action: pass

-  - name: desired-crawlers
+  - name: allow-private-networks
     conditions:
-      - *is-bot-googlebot
-      - *is-bot-bingbot
-      - *is-bot-duckduckbot
-      - *is-bot-kagibot
-      - *is-bot-qwantbot
-      - *is-bot-yandexbot
+      # Allows localhost and private networks CIDR
+      - *is-network-localhost
+      - *is-network-private
     action: pass

-  # Matches private networks and localhost.
-  # Uncomment this if you want to let your own tools this way
-  #- name: allow-private-networks
-  #  conditions:
-  #    # Allows localhost and private networks CIDR
-  #    - *is-network-localhost
-  #    - *is-network-private
-  #  action: pass
-
   - name: undesired-crawlers
     conditions:
       - '($is-headless-chromium)'
@@ -94,6 +78,15 @@ rules:
       - 'userAgent.contains("Amazonbot") || userAgent.contains("Google-Extended") || userAgent.contains("PanguBot") || userAgent.contains("AI2Bot") || userAgent.contains("Diffbot") || userAgent.contains("cohere-training-data-crawler") || userAgent.contains("Applebot-Extended")'
       # SEO / Ads and marketing
       - 'userAgent.contains("BLEXBot")'
+      # Yandex isn't caught, and doesn't seem to care about robots.txt
+      - 'userAgent.contains("YandexBot/3.0; +http://yandex.com/bots)"'
+      # At this point I'd rather not have any search engine crawl the frontend.
+      - *is-bot-googlebot
+      - *is-bot-bingbot
+      - *is-bot-duckduckbot
+      - *is-bot-kagibot
+      - *is-bot-qwantbot
+      - *is-bot-yandexbot
     action: drop

   - name: unknown-crawlers
@@ -110,7 +103,7 @@ rules:
   - name: 0
     action: check
     settings:
-      challenges: [js-refresh]
+      challenges: [js-refresh, js-pow-sha256]
   - name: 1
     action: check
     settings:
@@ -129,12 +122,12 @@ rules:
     # if DNSBL fails, check additional challenges
     fail: check
     fail-settings:
-      challenges: [js-refresh]
+      challenges: [js-refresh, js-pow-sha256]

   - name: suspicious-fetchers
     action: check
     settings:
-      challenges: [js-refresh]
+      challenges: [js-refresh, js-pow-sha256]
     conditions:
       - 'userAgent.contains("facebookexternalhit/") || userAgent.contains("facebookcatalog/")'

@@ -151,6 +144,12 @@ rules:
     context-set:
       # Map OpenGraph or similar <meta> tags back to the reply, even if denied/challenged
      proxy-meta-tags: "true"
+    response-headers:
+      # Solves the varnish bug even if we pulled it through a different way.
+      reddit-stats:
+        - io=1
+      via:
+        - 1.1 varnish

     # Set additional response headers
     #response-headers:
@@ -119,6 +119,7 @@ apps:
       FRONT_PAGE: popular
       COMMENT_SORT: new
       PUSHSHIFT_FRONTEND: "undelete.pullpush.io"
+      ROBOTS_DISABLE_INDEXING: on
       BLUR_NSFW: on
       USE_HLS: on
       AUTOPLAY_VIDEOS: off
@@ -126,11 +127,13 @@ apps:
     image: git.projectsegfau.lt/midou/go-away:latest
     ports:
       - "6464:9980"
+      - "9893:9893"
     mounts:
       - "{{data_dir}}/redlib/cache:/cache"
       - "{{configs_dir}}/redlib/policy.yml:/policy.yml:ro"
     environment:
       GOAWAY_BIND: ":9980"
+      GOAWAY_METRICS_BIND: ":9893"
       GOAWAY_BIND_NETWORK: "tcp"
       GOAWAY_CLIENT_IP_HEADER: "X-Real-Ip"
       GOAWAY_POLICY: "/policy.yml"
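The "grafana config" mentioned in the commit message is not part of this diff. Assuming go-away serves Prometheus-format metrics on the newly bound :9893 listener, a minimal Prometheus scrape job feeding such a dashboard might look like the sketch below; the job name, target hostname, and metrics path are placeholder assumptions, not taken from this repository.

scrape_configs:
  - job_name: "go-away"
    metrics_path: /metrics              # assumed default metrics path
    static_configs:
      - targets: ["redlib-host:9893"]   # host side of the "9893:9893" port mapping above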