# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# Default rules for all crawlers: block URLs that contain query strings,
# except for the sign-up/sign-in pages, pagination, and the brand filter allowed below.
User-agent: *
Disallow: /*?*
Disallow: /*?more_reviews*
Allow: /users/sign_up
Allow: /users/sign_in
Allow: /users/sign_up?from=rent_button
Allow: /*?page
Allow: /voitures?brand_name=
Sitemap: https://www.roadstr.fr/sitemap.xml
Sitemap: https://www.roadstr.fr/blog/sitemap.xml
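
# The user agents below (largely AI / data-collection crawlers) are blocked from the entire site.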
User-agent: amazonbot
Disallow: /

User-agent: anthropic-ai
Disallow: /

User-agent: bytespider
Disallow: /

User-agent: ccbot
Disallow: /

# User-agent: chatgpt-user
# Disallow: /

User-agent: claudebot
Disallow: /

User-agent: diffbot
Disallow: /

User-agent: friendlycrawler
Disallow: /

# User-agent: gptbot
# Disallow: /

User-agent: img2dataset
Disallow: /

User-agent: imagesiftbot
Disallow: /

User-agent: magpie-crawler
Disallow: /

User-agent: meta-externalagent
Disallow: /

User-agent: omgili
Disallow: /

User-agent: openai
Disallow: /

User-agent: spawning-ai
Disallow: /

User-agent: the knowledge ai
Disallow: /

User-agent: timpibot
Disallow: /

User-agent: webzio
Disallow: /

User-agent: youbot
Disallow: /