# robots.txt for http://www.firststop.eu
# copied from the mammut project

# For all robots
User-agent: *
# Block access to specific groups of pages
Disallow: /shared/404.html
Disallow: /customerLogin.action
Disallow: /extranetPosController
Disallow: /myAccount
Disallow: /cart
Disallow: /checkout
Crawl-delay: 10 # 10 seconds between page requests

# Allow search crawlers to discover the sitemap
Sitemap: http://www.firststop.eu/sitemap-index.xml

# Block CazoodleBot as it does not present correct accept content headers
User-agent: CazoodleBot
Disallow: /

# Block MJ12bot as it is just noise
User-agent: MJ12bot
Disallow: /

# Block dotbot as it cannot parse base urls properly
User-agent: dotbot/1.0
Disallow: /

# Block Gigabot
User-agent: Gigabot
Disallow: /