# robots.txt for feedam.org
#
# NOTE(review): crawlers obey only the MOST SPECIFIC matching group
# (RFC 9309). Any bot named in its own group below ignores this `*`
# group entirely, so /admin and /saved must be repeated in every
# specific group — previously those groups only had `Allow: /`,
# which silently exposed /admin and /saved to Googlebot, GPTBot, etc.

User-agent: *
Allow: /
Disallow: /admin

# /saved is a per-device private page (localStorage bookmarks) with no
# indexable content. Shared-list mode (/saved?ids=X,Y,Z) also generates
# an infinite permutation space that would waste crawl budget — the
# actual resource pages Google should index are also reachable via the
# sitemaps, so it still finds them.
Disallow: /saved

# Note: /?q=... search URLs are intentionally crawlable. Googlebot
# executes JS and renders ItemList JSON-LD on those pages, which
# surfaces our resource catalog. Canonical tag points to / so
# duplicate-content penalties don't apply.

# Server-rendered SSR surfaces — explicit allow so partner crawlers
# (and AI search engines that respect explicit signals over implicit
# ones) know these are the indexable, structured-data-rich pages we
# want them to focus on. R7+R8 added per-city pages for snap-stores
# and school-meals on top of the existing /food-pantry hierarchy.
Allow: /resource/
Allow: /food-pantry
Allow: /food-pantry/
Allow: /snap-stores
Allow: /snap-stores/
Allow: /school-meals
Allow: /school-meals/
Allow: /health-centers
Allow: /health-centers/
Allow: /wic-offices
Allow: /wic-offices/
Allow: /counties
Allow: /counties/
Allow: /guides
Allow: /guides/
Allow: /es
Allow: /es/

# 2026 Q2 high-volume "near me" landings + federal-program hubs.
# Phase 9-12 added these as standalone SSR pages with FAQPage +
# HowTo + GovernmentService schema. Listing each explicitly so
# crawl-budget bots prioritise them over deeper pagination.
Allow: /food-help
Allow: /food-help/
Allow: /food-pantry-near-me
Allow: /food-bank-near-me
Allow: /free-food-today
Allow: /wic-near-me
Allow: /summer-meals-near-me
Allow: /school-meals-near-me
Allow: /snap-eligibility
Allow: /apply-snap
Allow: /apply-snap/
Allow: /apply-wic
Allow: /apply-wic/
Allow: /disasters
Allow: /pantry
Allow: /partner

# Sprint-13 federal-program landings + dedicated donate channels
Allow: /tefap
Allow: /csfp
Allow: /senior-food-help
Allow: /donate/daf
Allow: /donate/stock
Allow: /donate/employer-match
Allow: /donate/legacy
Allow: /donate/crypto
Allow: /donate/in-memory
Allow: /donate/find-us
Allow: /donate/wire
Allow: /embed
Allow: /embed/widget
Allow: /embed/embed.js

# Verified Badge program — partner pantries embed the SVG on their
# own websites. Each badge URL = a backlink + brand impression.
Allow: /badges
Allow: /badge/
Allow: /og/resource/

# Hunger Atlas — citation-ready state-by-state stats for journalists,
# researchers, foundations. Each /atlas/ page is a unique
# fresh-content surface with Dataset + BreadcrumbList JSON-LD.
Allow: /atlas
Allow: /atlas/

# Research page — academic + journalist data-access landing.
# Dataset JSON-LD eligible for Google Dataset Search.
Allow: /research

# Editorial + journalist surfaces shipped in this loop
Allow: /calendar
Allow: /glossary
Allow: /story-pitches
Allow: /sponsors
Allow: /changelog
Allow: /security
Allow: /submit
Allow: /feeds
Allow: /partner/leaderboard
Allow: /embed/catalog

# Spanish editorial surfaces
Allow: /es/atlas
Allow: /es/atlas/
Allow: /es/disasters/
Allow: /es/glossary
Allow: /es/calendar

# Sister 501(c)(3) entity surfaces (R67 sister-sites buildout).
# Each sister has its own NonprofitOrganization schema, distinct
# canonical, and full FAQ / DonateAction / NewsArticle SSR coverage.
Allow: /texas
Allow: /texas/
Allow: /florida
Allow: /florida/

# AI Discovery Endpoints
Allow: /llms.txt
Allow: /llms-full.txt
Allow: /.well-known/ai-plugin.json
Allow: /.well-known/mcp.json
Allow: /mcp/v1
Allow: /api/openapi.json
Allow: /press/feed.json
Allow: /press/feed.atom

# HSDS 3.0 Open Referral feed — spec-compliant food-assistance data
# for Findhelp, United Way 211, Unite Us, academic researchers, etc.
Allow: /hsds/v3/
Allow: /hsds/v3/datapackage.json

# Crawl-delay for politeness (ignored by Googlebot; honoured by Bing
# and others). Kept inside this group — it is a per-group directive.
Crawl-delay: 1

# Major search engines. RFC 9309 lets several User-agent lines share
# one rule set; the Disallows are repeated here because these bots
# match this group INSTEAD of `*`.
User-agent: Googlebot
User-agent: Bingbot
User-agent: Slurp
User-agent: DuckDuckBot
Allow: /
Disallow: /admin
Disallow: /saved

# Allow AI agents — every AI crawler that respects robots.txt and
# cites sources is a brand-vector for Feed America. We allow them
# all explicitly so they don't have to guess from the catch-all.
# (meta-externalagent / Meta-ExternalAgent: UA matching is
# case-insensitive, but both spellings are kept defensively.)
# The Disallows are repeated because this group overrides `*`.
User-agent: GPTBot
User-agent: ChatGPT-User
User-agent: OAI-SearchBot
User-agent: Claude-Web
User-agent: ClaudeBot
User-agent: Anthropic-AI
User-agent: PerplexityBot
User-agent: Perplexity-User
User-agent: Google-Extended
User-agent: Applebot-Extended
User-agent: Bytespider
User-agent: meta-externalagent
User-agent: Meta-ExternalAgent
User-agent: YouBot
User-agent: cohere-ai
User-agent: cohere-training-data-crawler
User-agent: Diffbot
User-agent: AndiBot
Allow: /
Disallow: /admin
Disallow: /saved

# Sitemaps (group-independent, listed last). /sitemap.xml is the
# static, hand-curated list of headline pages. /sitemap-dynamic.xml
# proxies to the worker which generates the long tail — every state,
# every city, every popular /resource/:id, every
# /snap-stores/:state/:city, every /school-meals/:state/:city
# (placeholders reconstructed — TODO confirm against the worker).
Sitemap: https://feedam.org/sitemap.xml
Sitemap: https://feedam.org/sitemap-dynamic.xml