# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
User-agent: *
Sitemap: https://contexttravel.com/sitemap.xml.gz
# Disallow search pages from being indexed because we'll be penalized for
# duplicate content
Disallow: /*search?q=
# Disallow URLs that add tour items to the cart
Disallow: /*/timed_booker_searches/new
Disallow: /*/group_optimized_booker_searches/new
Disallow: /*/shopping_cart_items/new
Disallow: /cart$
# Paginated Reviews:
Disallow: /*/feedbacks
Disallow: /review_block_components/