# robots.txt file to dissuade over-enthusiastic crawling.
# Copied to all sites 15/7/24.
# dotbot isn't following the delay, so moved to disallow.

# disallow
User-agent: *
Disallow: /*.pdf$
Disallow: /*.doc$
Disallow: /*.docx$
Disallow: /*.xls$
Disallow: /*.xlsx$
Disallow: /*.ppt$
Disallow: /*.pptx$
Disallow: /*.rtf$
Disallow: /*.mp3$
Disallow: /*/documents/

User-agent: Googlebot-Image
Disallow: /

User-agent: grapeshot
Disallow: /

User-agent: GPTBot
Disallow: /

User-agent: PetalBot
Disallow: /

User-agent: Bytespider
Disallow: /

User-agent: dotbot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: SeekportBot
Disallow: /

User-agent: Barkrowler
Disallow: /

User-agent: MJ12bot
Disallow: /

# delay
User-agent: googlebot
Crawl-delay: 1

User-agent: applebot
Crawl-delay: 3

User-agent: bingbot
Crawl-delay: 3

User-agent: GeedoProductSearch
Crawl-delay: 20

User-agent: serpstatbot
Crawl-delay: 20

User-agent: coccocbot
Crawl-delay: 20

User-agent: Sogou web spider
Crawl-delay: 20

User-agent: BLEXBot
Crawl-delay: 20
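
# Note: the * and $ pattern matching in the disallow rules above is honoured
# by the major crawlers (and is standardised in RFC 9309), but some smaller
# bots may ignore it and crawl those URLs anyway.
# Note: Googlebot does not support Crawl-delay (Google manages crawl rate via
# Search Console), so the googlebot entry above is advisory at best.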