From 7ce05d21a1ec8da31dece132a3c1e2cd5d08153b Mon Sep 17 00:00:00 2001
From: Claude
Date: Wed, 24 Dec 2025 05:27:20 +0000
Subject: [PATCH 1/2] docs(seo): add static file options for robots.txt and
 sitemap.xml

Document that users can place static robots.txt and sitemap.xml files
in the public directory as a simpler alternative to server routes.

---
 docs/start/framework/react/guide/seo.md | 40 ++++++++++++++++++++++++-
 docs/start/framework/solid/guide/seo.md | 40 ++++++++++++++++++++++++-
 2 files changed, 78 insertions(+), 2 deletions(-)

diff --git a/docs/start/framework/react/guide/seo.md b/docs/start/framework/react/guide/seo.md
index f05fc63755c..2b3e9cf3e0d 100644
--- a/docs/start/framework/react/guide/seo.md
+++ b/docs/start/framework/react/guide/seo.md
@@ -225,6 +225,28 @@ export default defineConfig({
 
 The sitemap is generated at build time by crawling all discoverable pages from your routes. This is the recommended approach for static or mostly-static sites.
 
+### Static Sitemap
+
+For simple sites, you can also place a static `sitemap.xml` file in your `public` directory:
+
+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+  <url>
+    <loc>https://myapp.com/</loc>
+    <changefreq>daily</changefreq>
+    <priority>1.0</priority>
+  </url>
+  <url>
+    <loc>https://myapp.com/about</loc>
+    <changefreq>monthly</changefreq>
+  </url>
+</urlset>
+```
+
+This approach works well when your site structure is known and doesn't change often.
+
 ### Dynamic Sitemap
 
 For sites with dynamic content that can't be discovered at build time, you can create a dynamic sitemap using a [server route](./server-routes). Consider caching this at your CDN for performance:
@@ -271,7 +293,23 @@ export const Route = createFileRoute('/sitemap.xml')({
 
 ## robots.txt
 
-You can create a robots.txt file using a [server route](./server-routes):
+### Static robots.txt
+
+The simplest approach is to place a static `robots.txt` file in your `public` directory:
+
+```txt
+# public/robots.txt
+User-agent: *
+Allow: /
+
+Sitemap: https://myapp.com/sitemap.xml
+```
+
+This file will be served automatically at `/robots.txt`. For most sites, this is all you need.
+
+### Dynamic robots.txt
+
+For more complex scenarios (e.g., different rules per environment), you can decide on the rules at request time.
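+As a rough sketch, the per-environment branching might look like this (`DISALLOW_INDEXING` is an illustrative deployment flag, not a TanStack Start API):
+
+```ts
+// Sketch only: choose crawler rules from an assumed environment flag.
+// DISALLOW_INDEXING is hypothetical; substitute whatever your host provides.
+function robotsBody(): string {
+  return process.env.DISALLOW_INDEXING === 'true'
+    ? 'User-agent: *\nDisallow: /' // e.g. keep preview deploys out of the index
+    : 'User-agent: *\nAllow: /\n\nSitemap: https://myapp.com/sitemap.xml'
+}
+```
+
+To serve the result, create a robots.txt file using a [server route](./server-routes):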
 
 ```ts
 // src/routes/robots[.]txt.ts
diff --git a/docs/start/framework/solid/guide/seo.md b/docs/start/framework/solid/guide/seo.md
index 0e9a49edbdd..2cf3d50a0b3 100644
--- a/docs/start/framework/solid/guide/seo.md
+++ b/docs/start/framework/solid/guide/seo.md
@@ -225,6 +225,28 @@ export default defineConfig({
 
 The sitemap is generated at build time by crawling all discoverable pages from your routes. This is the recommended approach for static or mostly-static sites.
 
+### Static Sitemap
+
+For simple sites, you can also place a static `sitemap.xml` file in your `public` directory:
+
+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+  <url>
+    <loc>https://myapp.com/</loc>
+    <changefreq>daily</changefreq>
+    <priority>1.0</priority>
+  </url>
+  <url>
+    <loc>https://myapp.com/about</loc>
+    <changefreq>monthly</changefreq>
+  </url>
+</urlset>
+```
+
+This approach works well when your site structure is known and doesn't change often.
+
 ### Dynamic Sitemap
 
 For sites with dynamic content that can't be discovered at build time, you can create a dynamic sitemap using a [server route](./server-routes). Consider caching this at your CDN for performance:
@@ -271,7 +293,23 @@ export const Route = createFileRoute('/sitemap.xml')({
 
 ## robots.txt
 
-You can create a robots.txt file using a [server route](./server-routes):
+### Static robots.txt
+
+The simplest approach is to place a static `robots.txt` file in your `public` directory:
+
+```txt
+# public/robots.txt
+User-agent: *
+Allow: /
+
+Sitemap: https://myapp.com/sitemap.xml
+```
+
+This file will be served automatically at `/robots.txt`. For most sites, this is all you need.
+
+### Dynamic robots.txt
+
+For more complex scenarios (e.g., different rules per environment), you can create a robots.txt file using a [server route](./server-routes):
 
 ```ts
 // src/routes/robots[.]txt.ts

From 2665c092b8a61bf8db19300654a1946eae1dc56c Mon Sep 17 00:00:00 2001
From: Claude
Date: Wed, 24 Dec 2025 05:29:43 +0000
Subject: [PATCH 2/2] docs(seo): add Nozzle.io recommendation for rank
 tracking

Add a "Track Your Rankings" section recommending Nozzle.io as a partner
tool for monitoring SEO performance and keyword rankings.

---
 docs/start/framework/react/guide/seo.md | 4 ++++
 docs/start/framework/solid/guide/seo.md | 4 ++++
 2 files changed, 8 insertions(+)

diff --git a/docs/start/framework/react/guide/seo.md b/docs/start/framework/react/guide/seo.md
index 2b3e9cf3e0d..a4f580d446b 100644
--- a/docs/start/framework/react/guide/seo.md
+++ b/docs/start/framework/react/guide/seo.md
@@ -362,3 +362,7 @@ Use these tools to verify your SEO implementation:
 - [Google Rich Results Test](https://search.google.com/test/rich-results) - Validate structured data
 - [Open Graph Debugger](https://developers.facebook.com/tools/debug/) - Preview social sharing cards
 - Browser DevTools - Inspect rendered HTML and meta tags
+
+### Track Your Rankings
+
+To monitor your SEO performance over time, we recommend [Nozzle.io](https://nozzle.io?utm_source=tanstack). Nozzle provides enterprise-grade rank tracking that lets you monitor unlimited keywords, track SERP features, and analyze your visibility against competitors. Unlike traditional rank trackers, Nozzle stores the entire SERP for every query, giving you complete data to understand how your pages perform in search results.
diff --git a/docs/start/framework/solid/guide/seo.md b/docs/start/framework/solid/guide/seo.md
index 2cf3d50a0b3..617c4921823 100644
--- a/docs/start/framework/solid/guide/seo.md
+++ b/docs/start/framework/solid/guide/seo.md
@@ -362,3 +362,7 @@ Use these tools to verify your SEO implementation:
 - [Google Rich Results Test](https://search.google.com/test/rich-results) - Validate structured data
 - [Open Graph Debugger](https://developers.facebook.com/tools/debug/) - Preview social sharing cards
 - Browser DevTools - Inspect rendered HTML and meta tags
+
+### Track Your Rankings
+
+To monitor your SEO performance over time, we recommend [Nozzle.io](https://nozzle.io?utm_source=tanstack). Nozzle provides enterprise-grade rank tracking that lets you monitor unlimited keywords, track SERP features, and analyze your visibility against competitors. Unlike traditional rank trackers, Nozzle stores the entire SERP for every query, giving you complete data to understand how your pages perform in search results.