# Provided courtesy of http://browsers.garykeith.com.
# Created on April 12, 2007 at 6:24:06 AM GMT.
#
# Place this file in the root public folder of your website.
# It will stop the following bots from indexing your website.
#
# NOTE: robots.txt is line-oriented — each directive must appear on its own
# line, and each User-agent record is terminated by a blank line.

User-agent: abot
Disallow: /

User-agent: ALeadSoftbot
Disallow: /

User-agent: baiduspider
Disallow: /

User-agent: BeijingCrawler
Disallow: /

User-agent: BilgiBot
Disallow: /

User-agent: bot
Disallow: /

User-agent: botlist
Disallow: /

User-agent: BOTW Spider
Disallow: /

User-agent: bumblebee
Disallow: /

User-agent: Bumblebee
Disallow: /

User-agent: BuzzRankingBot
Disallow: /

User-agent: Charlotte
Disallow: /

User-agent: Clushbot
Disallow: /

User-agent: Crawler
Disallow: /

User-agent: CydralSpider
Disallow: /

User-agent: DataFountains
Disallow: /

User-agent: DiamondBot
Disallow: /

User-agent: DoCoMo
Disallow: /

User-agent: Dulance bot
Disallow: /

User-agent: DYNAMIC
Disallow: /

User-agent: EARTHCOM.info
Disallow: /

User-agent: EDI
Disallow: /

User-agent: envolk
Disallow: /

User-agent: Exabot
Disallow: /

User-agent: Exabot-Images
Disallow: /

User-agent: Exabot-Test
Disallow: /

User-agent: exactseek-pagereaper
Disallow: /

User-agent: Exalead NG
Disallow: /

User-agent: FANGCrawl
Disallow: /

User-agent: Feed::Find
Disallow: /

User-agent: Gigabot
Disallow: /

User-agent: GigabotSiteSearch
Disallow: /

User-agent: GurujiBot
Disallow: /

User-agent: Hatena Antenna
Disallow: /

User-agent: Hatena Bookmark
Disallow: /

User-agent: Hatena RSS
Disallow: /

User-agent: HatenaScreenshot
Disallow: /

User-agent: Helix
Disallow: /

User-agent: HiddenMarket
Disallow: /

User-agent: HyperEstraier
Disallow: /

User-agent: iaskspider
Disallow: /

User-agent: InfociousBot
Disallow: /

User-agent: iVia
Disallow: /

User-agent: iVia Page Fetcher
Disallow: /

User-agent: Jetbot
Disallow: /

User-agent: Kolinka Forum Search
Disallow: /

User-agent: KRetrieve
Disallow: /

User-agent: LetsCrawl.com
Disallow: /

User-agent: Lincoln State Web Browser
Disallow: /

User-agent: Links4US-Crawler
Disallow: /

User-agent: LOOQ
Disallow: /

User-agent: Lsearch/sondeur
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: MapoftheInternet.com
Disallow: /

User-agent: NationalDirectory
Disallow: /

User-agent: NetCarta_WebMapper
Disallow: /

User-agent: NewsGator
Disallow: /

User-agent: NextGenSearchBot
Disallow: /

User-agent: ng
Disallow: /

User-agent: nicebot
Disallow: /

User-agent: NP
Disallow: /

User-agent: NPBot
Disallow: /

User-agent: Nudelsalat
Disallow: /

User-agent: Nutch
Disallow: /

User-agent: OmniExplorer_Bot
Disallow: /

User-agent: OpenIntelligenceData
Disallow: /

User-agent: Oracle Enterprise Search
Disallow: /

User-agent: Pajaczek
Disallow: /

User-agent: panscient.com
Disallow: /

User-agent: PeerFactor 404 crawler
Disallow: /

User-agent: PeerFactor Crawler
Disallow: /

User-agent: PlantyNet
Disallow: /

User-agent: PlantyNet_WebRobot
Disallow: /

User-agent: plinki
Disallow: /

User-agent: PMAFind
Disallow: /

User-agent: Pogodak!
Disallow: /

User-agent: QuickFinder Crawler
Disallow: /

User-agent: Radiation Retriever
Disallow: /

User-agent: Reaper
Disallow: /

User-agent: RedCarpet
Disallow: /

User-agent: ScorpionBot
Disallow: /

User-agent: Scrubby
Disallow: /

User-agent: Scumbot
Disallow: /

User-agent: searchbot
Disallow: /

User-agent: Seeker.lookseek.com
Disallow: /

User-agent: SeznamBot
Disallow: /

User-agent: ShowXML
Disallow: /

User-agent: snap.com
Disallow: /

User-agent: snap.com beta crawler
Disallow: /

User-agent: Snapbot
Disallow: /

User-agent: SnapPreviewBot
Disallow: /

User-agent: sohu
Disallow: /

User-agent: SpankBot
Disallow: /

User-agent: Speedy Spider
Disallow: /

User-agent: Speedy_Spider
Disallow: /

User-agent: SpeedySpider
Disallow: /

User-agent: spider
Disallow: /

User-agent: SquigglebotBot
Disallow: /

User-agent: SurveyBot
Disallow: /

User-agent: SynapticSearch
Disallow: /

User-agent: T-H-U-N-D-E-R-S-T-O-N-E
Disallow: /

User-agent: Tagoobot
Disallow: /

User-agent: Talkro Web-Shot
Disallow: /

User-agent: Tarantula
Disallow: /

User-agent: TerrawizBot
Disallow: /

User-agent: TheInformant
Disallow: /

User-agent: TMCrawler
Disallow: /

User-agent: TridentSpider
Disallow: /

User-agent: TurnitinBot
Disallow: /

User-agent: Tutorial Crawler
Disallow: /

User-agent: Twiceler
Disallow: /

User-agent: URI::Fetch
Disallow: /

User-agent: VengaBot
Disallow: /

User-agent: Vonna.com b o t
Disallow: /

User-agent: Vortex
Disallow: /

User-agent: Votay bot
Disallow: /

User-agent: WebAlta Crawler
Disallow: /

User-agent: Webbot
Disallow: /

User-agent: Webclipping.com
Disallow: /

User-agent: WebCorp
Disallow: /

User-agent: Webinator
Disallow: /

User-agent: WIRE
Disallow: /

User-agent: WISEbot
Disallow: /

User-agent: Xerka WebBot
Disallow: /

User-agent: XSpider
Disallow: /

User-agent: yacybot
Disallow: /

User-agent: YodaoBot
Disallow: /

User-agent: Yoono
Disallow: /

User-agent: yoono
Disallow: /

# Otherwise, let the bot crawl the site, ignoring the following directories
# or files.
User-agent: *
Disallow: /account/botfinder.php
Disallow: /beta_docs/
Disallow: /CPU2006/
Disallow: /dev/
Disallow: /images/
Disallow: /img.v4/
Disallow: /include/
Disallow: /license/
Disallow: /OLD/
Disallow: /phpMyAdmin/
Disallow: /phpMyAdmin-3.4.5/
Disallow: /pricing/
Disallow: /pricing_old/
Disallow: /sales/order.php
Disallow: /search/
Disallow: /ssl/
Disallow: /styles.v4/
Disallow: /webmail/
Disallow: /webuser/
Disallow: /wm/
Crawl-delay: 180

# See http://search.msn.com/msnbot.htm
User-agent: msnbot
Crawl-delay: 300

# See http://www.google.com/support/webmasters/bin/topic.py?topic=8843
User-agent: googlebot
Crawl-delay: 300

# http://irl.cs.tamu.edu/crawler
User-agent: IRLbot
Crawl-delay: 300