# New Spice Grill Robots.txt

# Default directives for any bot that does not match a more specific group below
User-agent: *
Allow: /
Disallow: /config/
Disallow: /search/
Disallow: /account/
Disallow: /api/
Allow: /api/ui-extensions/
Disallow: /static/
Disallow: /*?*author=*
Disallow: /*?*tag=*
Disallow: /*?*month=*
Disallow: /*?*view=*
Disallow: /*?*format=*

# Search engine specific directives (Google, Bing, etc.)
# Keep search engine crawlers away from llms.txt (it is intended for the AI bots below)
User-agent: Googlebot
User-agent: Bingbot
User-agent: Slurp
User-agent: DuckDuckBot
User-agent: Baiduspider
User-agent: YandexBot
Disallow: /llms.txt

# AI bot specific directives
# Allow AI bots to crawl the site and llms.txt, but block the legal pages
User-agent: GPTBot
User-agent: ChatGPT-User
User-agent: CCBot
User-agent: anthropic-ai
User-agent: Google-Extended
User-agent: FacebookBot
User-agent: Claude-Web
User-agent: cohere-ai
Allow: /
Allow: /llms.txt
Disallow: /privacy-policy.html
Disallow: /terms-of-use.html

# Google Ads bots
User-agent: AdsBot-Google
User-agent: AdsBot-Google-Mobile
User-agent: AdsBot-Google-Mobile-Apps
Allow: /

# Sitemap location
Sitemap: https://newspicegrill.com/sitemap.xml

# Crawl delay for heavy bots
User-agent: Baiduspider
Crawl-delay: 10