# This file is for instructing search engine crawlers.

# --- BEGIN CUSTOM BOT BLOCKING RULES ---

# Block PerplexityBot
User-agent: PerplexityBot
Disallow: /

# Block Perplexity-User
User-agent: Perplexity-User
Disallow: /

# Block TikTok Spider
User-agent: TikTokSpider
Disallow: /

# Block SemrushBot
User-agent: SemrushBot
Disallow: /

# Block ChatGPT-User (OpenAI, user-initiated ChatGPT requests)
User-agent: ChatGPT-User
Disallow: /

# Block GPTBot (OpenAI's training crawler)
User-agent: GPTBot
Disallow: /

# Block ClaudeBot (Anthropic's crawler)
User-agent: ClaudeBot
Disallow: /

# Block AliyunSecBot (Alibaba Cloud)
User-agent: AliyunSecBot
Disallow: /

# Block MJ12bot (Majestic's SEO crawler)
User-agent: MJ12bot
Disallow: /

# Block PetalBot (Huawei Petal Search)
User-agent: PetalBot
Disallow: /

# Block Sogou web spider
User-agent: Sogou web spider
Disallow: /

# Block DotBot (Moz's SEO crawler)
User-agent: DotBot
Disallow: /

# --- END CUSTOM BOT BLOCKING RULES ---

# General rules for all bots
User-agent: *
Disallow: /wp-admin/
Allow: /wp-admin/admin-ajax.php
Disallow: /wp-includes/
Crawl-delay: 20

# Specific rule for Googlebot (note: Googlebot ignores Crawl-delay; use Search Console to limit crawl rate)
User-agent: Googlebot
Crawl-delay: 20

# Specific rule for meta-externalagent
User-agent: meta-externalagent
Crawl-delay: 20

# Specific rule for Bingbot
User-agent: Bingbot
Crawl-delay: 20

# Specific rule for Amazonbot
User-agent: Amazonbot
Crawl-delay: 30

# Specific rule for Bytespider
User-agent: Bytespider
Crawl-delay: 30

# Sitemap directive (replace with your actual sitemap URL if different)
Sitemap: https://www.mhsystem.org/sitemap.xml