Skip to content

Commit 1800b0c

Browse files
committed
feat(robots): add configuration to disable web crawlers
1 parent 24205ec commit 1800b0c

File tree

1 file changed

+26
-0
lines changed

1 file changed

+26
-0
lines changed

template/block/disable-robots.conf

+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
# Nginx UI Template Start
name = "Disable Robots"
author = "@0xJacky"
description = { en = "Disable Robots", zh_CN = "禁止搜索引擎爬虫"}
[variables.userAgents]
type = "string"
name = { en = "User Agents", zh_CN = "用户代理"}
value = "Googlebot|Bingbot|Baiduspider|YandexBot|Slurp|DuckDuckBot|Sogou|360Spider|facebot|AhrefsBot|SEMrushBot"
# Nginx UI Template End

# Nginx UI Custom Start
# Flag requests whose User-Agent matches any configured crawler pattern.
# The match is case-insensitive (~*); $is_bad_bot is 1 on match, 0 otherwise.
# NOTE(review): map must live in the http{} context — presumably Nginx UI
# places this "Custom" section there; confirm against the deployment logic.
map $http_user_agent $is_bad_bot {
    default 0;
    ~*({{ .userAgents }}) 1;
}
# Nginx UI Custom End

# Drop matched crawlers with nginx's non-standard status 444, which
# closes the connection without sending any response.
if ($is_bad_bot) {
    return 444;
}

# Serve a robots.txt that disallows all crawling for every user agent.
location = /robots.txt {
    # Use default_type to SET the Content-Type of the `return` response.
    # The original `add_header Content-Type text/plain;` only appended an
    # extra header, so clients received a second, possibly conflicting
    # Content-Type alongside the one nginx derives from default_type.
    default_type text/plain;
    add_header Cache-Control "no-store, no-cache, must-revalidate";
    return 200 "User-agent: *\nDisallow: /\n";
}

0 commit comments

Comments
 (0)