# Envirofone robots.txt – optimised for efficient Google crawling
# NOTE(review): directives MUST each be on their own line (RFC 9309);
# the previous single-line form was unparseable because the leading
# "#" comment ran to end-of-line and swallowed every directive.

# Default rule: allow crawling of the whole site
User-agent: *
Allow: /

# Block true non-content / checkout flows
Disallow: /en-gb/sell/checkout
Disallow: /en-gb/sell/complete
Disallow: /en-gb/buy/basket
Disallow: /en-gb/buy/delivery
Disallow: /en-gb/buy/payment-method
Disallow: /en-gb/buy/thank-you

# SEO Fixes: duplicated locale/path segments produced by a routing bug
Disallow: /en-gb/en-gb
Disallow: /en-gb/buy/products/en-gb/buy/products/

# Keep technical / protection paths out of the index
Disallow: /cdn-cgi/l/email-protection

# DO NOT block /_next/ or all URLs with ? or &
# Google needs JS/CSS and some parameter URLs for proper rendering.
# If you have noisy tracking params, block only those specifically, e.g.:
# Disallow: /*?utm_
# Disallow: /*&utm_
# Disallow: /*?gclid=
# Disallow: /*&gclid=

# Explicitly allow major AI research crawlers (optional, non-Google)
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: Google-Extended
Allow: /

User-agent: CCBot
Allow: /

User-agent: PerplexityBot
Allow: /

# Sitemap helps discovery – dynamically generated, refreshed every 12 hours
Sitemap: https://www.envirofone.com/sitemap.xml