# A list of misbehaving crawlers.
# originally from http://aardling.com/robots.txt

User-agent: DOC
Disallow: /

User-agent: Download Ninja
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites.

User-agent: Fetch
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: linko
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: UbiCrawler
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: ZyBORG
Disallow: /

# Wget in its recursive mode is a frequent problem.
User-agent: wget
Disallow: /

# The 'grub' distributed client has been *very* poorly behaved.
User-agent: grub-client
Disallow: /

# Doesn't follow robots.txt anyway, but...
User-agent: k2spider
Disallow: /

# Hits many times per second, not acceptable.
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot; downloads gazillions of pages with no public benefit.
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

# Obviously don't want _these_
User-agent: zombies
Disallow: /brains

# These rules apply to everyone else.
User-agent: *
Disallow: /?page=*
Disallow: /admin
Disallow: /admin/*

Sitemap: https://www.ansarada.com/sitemap.xml
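
# Note on the wildcard rules above: '*' inside a Disallow path is honored
# by the major crawlers (Googlebot, Bingbot) and is standardized in
# RFC 9309, but crawlers implementing only the original 1994 convention
# match Disallow values as plain path prefixes. Under prefix matching,
# "Disallow: /admin" already covers everything under /admin/, so the
# "/admin/*" line is redundancy kept for safety rather than a requirement.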