# Define access restrictions for robots/spiders
# http://www.robotstxt.org/wc/norobots.html

# By default we allow robots to access all areas of our site
# already accessible to anonymous users

User-agent: *
Disallow: /*sendto_form$
Disallow: /*folder_factories$
Disallow: /app/*
Disallow: /pls/*
Disallow: /companies/app/*
Disallow: /fsp/app/*
Disallow: /data/app/*
Disallow: /@@searchlite*
Disallow: /ajax-search*
Disallow: /do-it-online/register-a-company/addresssearch*
Disallow: /do-it-online/register-a-company/companydetails/*
Disallow: /do-it-online/register-a-company/directorsearch*
Disallow: /do-it-online/register-a-company/shareholdersearch*
Disallow: /*pls/web*
Disallow: /*online-services*
Disallow: /*your-online-account*
Disallow: /*ppsr2-web/search*
Disallow: /java/*
Disallow: /*scan*es*

# The wildcard (*) and end-of-URL ($) patterns above are a Googlebot syntax
# extension to the robots.txt standard, used to exclude forms that are
# repeated for each piece of content in the site. Not every crawler supports
# this extension.
# http://www.google.com/support/webmasters/bin/answer.py?answer=40367&ctx=sibling
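
# For illustration only (kept commented out): the same extension could instead
# be declared in a crawler-specific group such as the one below. If this group
# were enabled, Googlebot would obey only its own group and ignore the rules
# under "User-agent: *" above, which is presumably why all rules in this file
# live in the wildcard group.
# User-agent: Googlebot
# Disallow: /*sendto_form$
# Disallow: /*folder_factories$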