# Robots.txt last edited by Jos Jonkeren 2022-11-03 15:16

# Crawlers Setup
user-agent: *
crawl-delay: 5
# Google doesn't support the crawl-delay directive, so its crawlers will simply ignore it.
# Bing: crawl at most one page per 5 seconds.

# Sitemap location
sitemap: https://www.pietzoomers.com/sitemap.xml

# Allowable Index
allow: /*?page=
allow: /*

# Disallow URL parameters from being indexed
disallow: *brand[filter]*
disallow: *color_group[filter]*
disallow: *size[filter]*
disallow: *price[filter]*
disallow: *salelabel[filter]*
disallow: *hoofdgroep[filter]*

# Disallow directories
disallow: /404/
disallow: /app/
disallow: /cgi-bin/
disallow: /downloader/
disallow: /includes/
disallow: /js/
disallow: /lib/
disallow: /magento/
disallow: /pkginfo/
disallow: /report/
disallow: /skin/
disallow: /stats/
disallow: /var/

# Disallow paths (clean URLs)
disallow: /index.php/
disallow: /catalog/product_compare/
disallow: /catalog/category/view/
disallow: /catalog/product/view/
disallow: /catalogsearch/
disallow: /checkout/
disallow: /control/
disallow: /contacts/
disallow: /customer/
disallow: /customize/
disallow: /newsletter/
disallow: /poll/
disallow: /review/
disallow: /sendfriend/
disallow: /tag/
disallow: /wishlist/

# Disallow files
disallow: /cron.php
disallow: /cron.sh
disallow: /error_log
disallow: /install.php
disallow: /LICENSE.html
disallow: /LICENSE.txt
disallow: /LICENSE_AFL.txt
disallow: /STATUS.txt

# Do not index session IDs
disallow: /*?SID=

# Disallow paths (no clean URLs)
disallow: /*.js$
disallow: /*.css$
disallow: /*.php$
disallow: /*?p=*&
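
# Note on the pattern syntax used above (per RFC 9309 and Google's robots.txt
# documentation): "*" matches any sequence of characters and "$" anchors the
# match at the end of the URL. For example (hypothetical URLs, for
# illustration only): /jassen?brand[filter]=nike&page=2 would be blocked by
# the "disallow: *brand[filter]*" rule, and /media/styles.css would be
# blocked by "disallow: /*.css$".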