# $Id: robots.txt
#
# This is a file retrieved by webwalkers a.k.a. spiders that
# conform to a de facto standard.
# See
#
# Format is:
#    User-agent: <name of spider>
#    Disallow: <nothing> | <path prefix>
# -----------------------------------------------------------------------------

User-agent: nys-crawler
Disallow:

User-agent: W3C-checklink
Disallow:

User-agent: Googlebot
Disallow:

User-agent: Inktomi Slurp
Disallow:

User-agent: MSNBot
Disallow:

User-agent: AskJeeves
Disallow:

User-agent: InfoSeek Robot 1.0
Disallow:

User-agent: InfoSeek Sidewinder
Disallow:

User-agent: nys-qa-crawler
Disallow:

User-agent: *
Disallow: /
Disallow: //
Disallow: /portal/page/portal/
Disallow: /pls/
Disallow: /portalHelp2/
Disallow: /portal/pls/
Disallow: /tmp
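
# -----------------------------------------------------------------------------
# How these rules read (per the robots exclusion standard): an empty
# "Disallow:" value places no restrictions, so each crawler named above may
# fetch the entire site. Any other agent falls through to the catch-all "*"
# record, whose "Disallow: /" line already blocks every URL; the more
# specific paths listed after it are therefore redundant but harmless.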