# robots.txt for sukisu.org
# Source: SukiSU-Ultra/Website/docs/public/robots.txt

User-agent: *
Allow: /
Disallow: /admin/
Disallow: /.git/
Disallow: /node_modules/
Disallow: /api/
Disallow: /.vitepress/
# Crawl-delay is non-standard (Google ignores it), and fractional values
# may not be honored, so whole seconds are used
Crawl-delay: 1

# Sitemap
Sitemap: https://sukisu.org/sitemap.xml
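
# Note: per RFC 9309, a crawler obeys only its most specific matching
# group, so the Disallow rules above apply only to bots that have no
# named group below.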

# Major search engines

User-agent: Googlebot
Allow: /
# Google ignores Crawl-delay, so none is set for Googlebot

User-agent: Bingbot
Allow: /
Crawl-delay: 1

User-agent: Slurp
Allow: /
Crawl-delay: 1

User-agent: DuckDuckBot
Allow: /
Crawl-delay: 1

User-agent: Baiduspider
Allow: /
Crawl-delay: 2

# Regional search engines (Russia, Korea, China)

User-agent: YandexBot
Allow: /
Crawl-delay: 1

# Naver's current crawler token is Yeti; NaverBot is a legacy name
User-agent: Yeti
Allow: /
Crawl-delay: 1

User-agent: SogouSpider
Allow: /
Crawl-delay: 2

# Block resource-intensive SEO crawlers

User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: DotBot
Disallow: /

# Block AI training crawlers to save bandwidth

User-agent: GPTBot
Disallow: /

User-agent: ChatGPT-User
Disallow: /

User-agent: CCBot
Disallow: /

User-agent: anthropic-ai
Disallow: /

User-agent: Claude-Web
Disallow: /
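
# anthropic-ai and Claude-Web are tokens from third-party blocklists; the
# crawler token Anthropic itself documents is ClaudeBot. Google-Extended
# controls whether a site is used for Google AI training, and
# PerplexityBot is Perplexity's crawler.
User-agent: ClaudeBot
Disallow: /

User-agent: Google-Extended
Disallow: /

User-agent: PerplexityBot
Disallow: /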