From 447344784302670bfbed4a1a604c34383d1a69b5 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Wed, 22 Oct 2025 09:06:19 -0400
Subject: [PATCH 1/2] add-robots-block-crawlers

---
 docs/static/robots.txt | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 docs/static/robots.txt

diff --git a/docs/static/robots.txt b/docs/static/robots.txt
new file mode 100644
index 00000000..783e3e94
--- /dev/null
+++ b/docs/static/robots.txt
@@ -0,0 +1,12 @@
+# Robots.txt for OpenRAG Documentation
+
+# Block all crawlers by default
+User-agent: *
+Disallow: /
+
+# Allow specific crawlers if needed (uncomment when ready for launch)
+# User-agent: Googlebot
+# Allow: /
+
+# Sitemap location (uncomment when ready for launch)
+# Sitemap: https://docs.openr.ag/sitemap.xml

From c5b4b8164023d854cae17d63a78facf1e57f6ce9 Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Wed, 22 Oct 2025 09:30:31 -0400
Subject: [PATCH 2/2] block-indexing

---
 docs/docusaurus.config.js | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index ab33c338..f0767fa6 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -25,6 +25,9 @@ const config = {
   // For GitHub pages deployment, it is often '/<projectName>/'
   baseUrl: process.env.BASE_URL ? process.env.BASE_URL : '/',
 
+  // Control search engine indexing - set to true to prevent indexing
+  noIndex: true,
+
   // GitHub pages deployment config.
   // If you aren't using GitHub pages, you don't need these.
   organizationName: 'langflow-ai', // Usually your GitHub org/user name.
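
With noIndex: true, Docusaurus emits <meta name="robots" content="noindex, nofollow"> on every generated page, which complements the robots.txt rule in the first commit: robots.txt only stops compliant crawlers from fetching pages, while the meta tag also tells engines not to index pages they reach through links. If re-enabling indexing at launch should not require a code change, one option is to drive the flag from the environment, mirroring the existing BASE_URL pattern already in this config. A minimal sketch, assuming a hypothetical NO_INDEX environment variable (not part of this patch):

  // docusaurus.config.js (sketch only; NO_INDEX is an assumed variable name)
  // Blocks indexing unless the build explicitly sets NO_INDEX=false,
  // so the safe default stays "blocked" on every environment.
  noIndex: process.env.NO_INDEX !== 'false',

At launch, the deploy pipeline would set NO_INDEX=false and uncomment the Googlebot and Sitemap lines in robots.txt, rather than shipping a config edit.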