Files
akmon/doc_seo/templates/robots.txt
2026-01-20 08:04:15 +08:00

40 lines
670 B
Plaintext

# robots.txt (template)
# Adjust Allow/Disallow and Crawl-delay per your policy.
User-agent: *
Allow: /
# Disallow sensitive or duplicate areas
Disallow: /admin/
Disallow: /api/
Disallow: /cart/
# Sitemaps
Sitemap: https://example.com/sitemap_index.xml
# Optional crawl pacing (not all bots honor this)
# Crawl-delay: 2
# AI/LLM bots (adjust per policy)
# NOTE: per RFC 9309, a crawler that matches one of the named groups below
# ignores the `User-agent: *` group entirely — so these bots do NOT inherit
# the Disallow rules above. Duplicate any Disallow lines here if they should
# also apply to these bots.
User-agent: GPTBot
Allow: /
User-agent: CCBot
Allow: /
User-agent: ClaudeBot
Allow: /
User-agent: PerplexityBot
Allow: /
User-agent: Google-Extended
Allow: /
User-agent: Applebot
Allow: /
User-agent: Bytespider
Allow: /
# To block one of these bots entirely, change its `Allow: /` to `Disallow: /`.
# Removing a bot's group instead makes it fall back to the `User-agent: *`
# rules above (i.e. mostly allowed).