Merge branch 'main' of github.com:langflow-ai/openrag into feat/new-onboarding

Author: Mike Fortman
Date: 2025-10-23 09:03:33 -05:00
Commit: 6b583813ec
6 changed files with 24 additions and 0 deletions

docs/.gitignore (vendored, 1 addition)

@@ -23,3 +23,4 @@ yarn-error.log*
!package.json
!package-lock.json
!yarn.lock
!scraper.config.json

Docusaurus site config (3 additions)

@@ -25,6 +25,9 @@ const config = {
// For GitHub pages deployment, it is often '/<projectName>/'
baseUrl: process.env.BASE_URL ? process.env.BASE_URL : '/',
// Control search engine indexing - set to true to prevent indexing
noIndex: true,
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
organizationName: 'langflow-ai', // Usually your GitHub org/user name.
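
The noIndex flag is the substantive change here: when it is true, Docusaurus adds a robots meta tag (noindex, nofollow) to every generated page, so search engines skip the site even if they reach it. A minimal sketch of the config with just the fields visible in this hunk, assuming the surrounding file is an otherwise standard CommonJS Docusaurus config:

// Sketch of the relevant part of the Docusaurus config; only the fields shown
// in the diff above come from this commit, the rest of the file is omitted.
const config = {
  baseUrl: process.env.BASE_URL ? process.env.BASE_URL : '/',
  noIndex: true, // emits <meta name="robots" content="noindex, nofollow"> on every page
  organizationName: 'langflow-ai',
};

module.exports = config;

Flipping noIndex back to false, together with relaxing the robots.txt rules added below, would be the switch to throw when the docs are ready to be indexed.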

package.json (1 addition)

@@ -6,6 +6,7 @@
"docusaurus": "docusaurus",
"start": "docusaurus start",
"build": "docusaurus build",
"build:pdf": "npm run build && npm run serve & sleep 10 && npx docusaurus-to-pdf && pkill -f 'docusaurus serve'",
"swizzle": "docusaurus swizzle",
"deploy": "docusaurus deploy",
"clear": "docusaurus clear",

Binary file not shown.

docs/scraper.config.json (new file, 7 additions)

@@ -0,0 +1,7 @@
{
"baseUrl": "http://localhost:3000",
"entryPoint": "http://localhost:3000",
"outputDir": "./pdf/openrag-documentation.pdf",
"customStyles": "table { max-width: 3500px !important; } .navbar, .footer, .breadcrumbs { display: none !important; }",
"forceImages": true
}

docs/static/robots.txt (new file, vendored, 12 additions)

@@ -0,0 +1,12 @@
# Robots.txt for OpenRAG Documentation
# Block all crawlers by default
User-agent: *
Disallow: /

# Allow specific crawlers if needed (uncomment when ready for launch)
# User-agent: Googlebot
# Allow: /

# Sitemap location (uncomment when ready for launch)
# Sitemap: https://docs.openr.ag/sitemap.xml