# robots.txt file for travel website

# Disallow all crawlers from accessing admin pages and private directories
User-agent: *
Disallow: /admin/
Disallow: /login/
Disallow: /register/
Disallow: /user-data/
Disallow: /cart/
Disallow: /checkout/
Disallow: /private/
Disallow: /search-results?

# Explicitly allow Googlebot and Bingbot to crawl the rest of the site.
# A crawler that matches a specific User-agent group follows only that
# group and ignores the * group above (RFC 9309), so the private-area
# rules are repeated here. The Disallow lines come before the blanket
# Allow so that order-sensitive parsers also apply them first.
User-agent: Googlebot
User-agent: Bingbot
Disallow: /admin/
Disallow: /login/
Disallow: /register/
Disallow: /user-data/
Disallow: /cart/
Disallow: /checkout/
Disallow: /private/
Disallow: /search-results?
Allow: /

# Sitemap for search engines
Sitemap: https://www.divassojourn.com/sitemap.xml
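
Before deploying a file like this, it can help to sanity-check who may fetch what. Below is a minimal sketch using Python's standard-library urllib.robotparser; the inlined rules are an abbreviated copy of the file above, and the sample paths (such as /destinations/bali) and the SomeOtherBot name are made-up placeholders for illustration.

# Minimal sketch: verify crawler access under the rules above.
# The sample URLs below are hypothetical; substitute real site paths.
from urllib.robotparser import RobotFileParser

ROBOTS_TXT = """\
User-agent: *
Disallow: /admin/
Disallow: /private/

User-agent: Googlebot
User-agent: Bingbot
Disallow: /admin/
Disallow: /private/
Allow: /
"""

parser = RobotFileParser()
parser.parse(ROBOTS_TXT.splitlines())  # parse() accepts an iterable of lines

BASE = "https://www.divassojourn.com"
checks = [
    ("Googlebot",    BASE + "/destinations/bali"),  # public page: allowed
    ("Googlebot",    BASE + "/admin/"),             # repeated rule: blocked
    ("SomeOtherBot", BASE + "/private/"),           # falls under *: blocked
]
for agent, url in checks:
    verdict = "allowed" if parser.can_fetch(agent, url) else "blocked"
    print(f"{agent:>12} -> {url}: {verdict}")

Note that Python's parser applies rules in file order rather than by longest-path match, which is one more reason the Disallow lines precede Allow: / in the Googlebot/Bingbot group.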