# Robots.txt for 1ly.store
# Allow all bots to crawl public pages
# Block sensitive areas

# Social Media Crawlers FIRST - Allow OG image access
User-agent: Twitterbot
Allow: /
Allow: /api/og

User-agent: facebookexternalhit
Allow: /
Allow: /api/og

User-agent: WhatsApp
Allow: /
Allow: /api/og

User-agent: LinkedInBot
Allow: /
Allow: /api/og

User-agent: Slackbot
Allow: /
Allow: /api/og

User-agent: TelegramBot
Allow: /
Allow: /api/og

# Default rules for all other bots
User-agent: *
Allow: /
Allow: /api/og
Disallow: /api/
Disallow: /dashboard/
Disallow: /pay/
Disallow: /_next/
Disallow: /private/

# Specific bot rules for better indexing
User-agent: Googlebot
Allow: /
Crawl-delay: 0

User-agent: Bingbot
Allow: /
Crawl-delay: 0

# AI/LLM Crawlers - FULL ACCESS for discovery
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: OAI-SearchBot
Allow: /

User-agent: CCBot
Allow: /

User-agent: anthropic-ai
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: YouBot
Allow: /

User-agent: Bytespider
Allow: /

User-agent: Diffbot
Allow: /

User-agent: FacebookBot
Allow: /

User-agent: Google-Extended
Allow: /

User-agent: Googlebot-News
Allow: /

User-agent: cohere-ai
Allow: /

User-agent: Meta-ExternalAgent
Allow: /

User-agent: meta-externalfetcher
Allow: /

User-agent: Applebot-Extended
Allow: /

User-agent: amazonbot
Allow: /

# Block bad bots
User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

# Sitemap location
Sitemap: https://1ly.store/sitemap.xml
Sitemap: https://1ly.store/sitemap-profiles.xml
Sitemap: https://1ly.store/sitemap-links.xml