## robots.txt 

## GENERAL SETTINGS

## Allow the following named crawlers full access (an empty Disallow value matches no URLs)
User-agent: Googlebot
User-agent: Slurp
User-agent: Bingbot
User-agent: MSNBot
User-agent: MSNBot-Media
User-agent: Mediapartners-Google
User-agent: Googlebot-Image
User-agent: Yahoo-MMCrawler
User-agent: Baiduspider
User-agent: FacebookExternalHit
User-agent: Facebot
User-agent: SemrushBot
Disallow:

## Crawl-delay: number of seconds a crawler should wait between successive
## requests to the same server. Set a custom crawl rate if you're experiencing
## traffic problems with your server.
## Note: Googlebot ignores Crawl-delay; set its crawl rate in Search Console instead.
# Crawl-delay: 30

## Throttle aggressive SEO crawlers.
## NOTE: SemrushBot is also listed in the allow-all group above; crawlers that
## obey only one matching group may ignore this Crawl-delay — confirm intent.
User-agent: SemrushBot
Crawl-delay: 20

User-agent: AhrefsBot
Crawl-delay: 20

## Do not crawl member, newsletter, and video sections.
## A User-agent line is required here: Disallow rules that are not preceded
## by one belong to no group and are ignored by spec-compliant parsers.
User-agent: *
Disallow: /member-centre/*
Disallow: /newsletter/
Disallow: /video-library/
Disallow: /video_gallery/


## Magento sitemap: uncomment and replace the URL to your Magento sitemap file
# Sitemap: http://www.example.com/sitemap/sitemap.xml

## DEVELOPMENT RELATED SETTINGS

## Do not crawl development files and folders: CVS, svn directories and dump files.
## NOTE(review): the trailing $ anchors the match to paths that END in the
## extension; it will not block files inside a .svn/ or .idea/ directory —
## confirm that is the intent.
User-agent: *
Disallow: /CVS
Disallow: /*.svn$
Disallow: /*.idea$
Disallow: /*.sql$
Disallow: /*.tgz$

## SERVER SETTINGS

## Do not crawl common server technical folders and files
User-agent: *
Disallow: /cgi-bin/
Disallow: /cleanup.php
Disallow: /apc.php
Disallow: /memcache.php
Disallow: /phpinfo.php

## IMAGE CRAWLERS SETTINGS

## Extra: Uncomment if you do not wish Google and Bing to index your images
# User-agent: Googlebot-Image
# Disallow: /
# User-agent: msnbot-media
# Disallow: /

## Default record for all other crawlers: allow everything
## (an empty Disallow value matches no URLs; a record with no rule
## lines at all is incomplete and may be discarded by parsers)
User-agent: *
Disallow: