Merge pull request #284 from langflow-ai/docs-add-robots-txt
docs: add robots.txt and noIndex
commit 2f3b7b69bd
2 changed files with 15 additions and 0 deletions
@@ -25,6 +25,9 @@ const config = {
   // For GitHub pages deployment, it is often '/<projectName>/'
   baseUrl: process.env.BASE_URL ? process.env.BASE_URL : '/',
 
+  // Control search engine indexing - set to true to prevent indexing
+  noIndex: true,
+
   // GitHub pages deployment config.
   // If you aren't using GitHub pages, you don't need these.
   organizationName: 'langflow-ai', // Usually your GitHub org/user name.
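For context: in Docusaurus, setting noIndex: true in the site config adds a robots meta tag to every generated page, roughly

  <meta name="robots" content="noindex, nofollow" />

so search engines are asked not to index or follow any page even if they reach the site. (Illustrative rendering only; the tag is emitted by Docusaurus itself and is not part of this diff.)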
docs/static/robots.txt (new normal file, vendored, 12 additions)
@@ -0,0 +1,12 @@
+# Robots.txt for OpenRAG Documentation
+
+# Block all crawlers by default
+User-agent: *
+Disallow: /
+
+# Allow specific crawlers if needed (uncomment when ready for launch)
+# User-agent: Googlebot
+# Allow: /
+
+# Sitemap location (uncomment when ready for launch)
+# Sitemap: https://docs.openr.ag/sitemap.xml
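Once deployed, anything under docs/static/ is served from the site root, so a quick sanity check (assuming the docs end up hosted at docs.openr.ag, as the commented Sitemap line suggests) is:

  curl https://docs.openr.ag/robots.txt

The active "User-agent: *" / "Disallow: /" pair asks all compliant crawlers to skip every path; the commented Googlebot and Sitemap entries are the ones to uncomment at launch.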