diff --git a/404.html b/404.html index 23981fdb..c04d90f1 100644 --- a/404.html +++ b/404.html @@ -4,7 +4,7 @@ OpenRAG - + diff --git a/agents/index.html b/agents/index.html index 257053a1..6958c360 100644 --- a/agents/index.html +++ b/agents/index.html @@ -4,7 +4,7 @@ Langflow in OpenRAG | OpenRAG - + @@ -37,7 +37,7 @@ This filter is the Knowle To modify a flow, click Settings, and click Edit in Langflow. OpenRAG's visual editor is based on the Langflow visual editor, so you can edit your flows to match your specific use case.

For an example of changing out the agent's language model in OpenRAG, see the Quickstart.

-To restore the flow to its initial state, in OpenRAG, click Settings, and then click Restore Flow.
+To restore the flow to its initial state, in OpenRAG, click Settings, and then click Restore Flow. OpenRAG warns you that this discards all custom settings. Click Restore to restore the flow.

Additional Langflow functionality

Langflow includes features beyond Agents to help you integrate OpenRAG into your application, and all Langflow features are included in OpenRAG.
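Because every OpenRAG flow is also a Langflow flow, one way to integrate OpenRAG into an application is to call Langflow's run API for the flow that backs your chat. The following Python sketch is illustrative only and is not part of this change set: it assumes a local Langflow instance on its default port, Langflow's standard /api/v1/run endpoint, and a hypothetical flow ID and API key.

    import requests

    LANGFLOW_URL = "http://localhost:7860"   # assumed local Langflow address
    FLOW_ID = "openrag-opensearch-agent"     # hypothetical flow ID or name
    API_KEY = "YOUR_LANGFLOW_API_KEY"        # created in Langflow settings

    # Send a chat message to the flow and print the JSON response.
    response = requests.post(
        f"{LANGFLOW_URL}/api/v1/run/{FLOW_ID}",
        headers={"x-api-key": API_KEY, "Content-Type": "application/json"},
        json={
            "input_value": "Which documents mention Q4 revenue?",
            "input_type": "chat",
            "output_type": "chat",
        },
        timeout=120,
    )
    response.raise_for_status()
    print(response.json())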

diff --git a/assets/js/0ba6a408.29066fa2.js b/assets/js/0ba6a408.29066fa2.js deleted file mode 100644 index a37f02a0..00000000 --- a/assets/js/0ba6a408.29066fa2.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[571],{3227:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>u,contentTitle:()=>h,default:()=>x,frontMatter:()=>a,metadata:()=>i,toc:()=>p});const i=JSON.parse('{"id":"core-components/knowledge","title":"OpenSearch in OpenRAG","description":"OpenRAG uses OpenSearch for its vector-backed knowledge store.","source":"@site/docs/core-components/knowledge.mdx","sourceDirName":"core-components","slug":"/knowledge","permalink":"/knowledge","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/knowledge.mdx","tags":[],"version":"current","frontMatter":{"title":"OpenSearch in OpenRAG","slug":"/knowledge"},"sidebar":"tutorialSidebar","previous":{"title":"Langflow in OpenRAG","permalink":"/agents"},"next":{"title":"Docling in OpenRAG","permalink":"/ingestion"}}');var t=s(4848),o=s(8453),r=s(1610),l=s(1470),d=s(9365),c=s(3782);const a={title:"OpenSearch in OpenRAG",slug:"/knowledge"},h=void 0,u={},p=[{value:"Authentication and document access",id:"auth",level:2},{value:"Ingest knowledge",id:"ingest-knowledge",level:2},{value:"Direct file ingestion",id:"direct-file-ingestion",level:3},{value:"Ingest files through OAuth connectors",id:"oauth-ingestion",level:3},{value:"Explore knowledge",id:"explore-knowledge",level:2},...c.RM,{value:"Create knowledge filters",id:"create-knowledge-filters",level:2},{value:"OpenRAG default configuration",id:"openrag-default-configuration",level:2}];function g(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsxs)(n.p,{children:["OpenRAG uses ",(0,t.jsx)(n.a,{href:"https://docs.opensearch.org/latest/",children:"OpenSearch"})," for its vector-backed knowledge store.\nThis is a specialized database for storing and retrieving embeddings, which helps your Agent efficiently find relevant information.\nOpenSearch provides powerful hybrid search capabilities with enterprise-grade security and multi-tenancy support."]}),"\n",(0,t.jsx)(n.h2,{id:"auth",children:"Authentication and document access"}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG supports two authentication modes based on how you ",(0,t.jsx)(n.a,{href:"/install",children:"install OpenRAG"}),", and which mode you choose affects document access."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"No-auth mode (Basic Setup)"}),": This mode uses a single anonymous JWT token for OpenSearch authentication, so documents uploaded to the ",(0,t.jsx)(n.code,{children:"documents"})," index by one user are visible to all other users on the OpenRAG server."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"OAuth mode (Advanced Setup)"}),": Each OpenRAG user is granted a JWT token, and each document is tagged with user ownership. 
Documents are filtered by user ownership, ensuring users only see documents they uploaded or have access to."]}),"\n",(0,t.jsx)(n.h2,{id:"ingest-knowledge",children:"Ingest knowledge"}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG supports knowledge ingestion through direct file uploads and OAuth connectors.\nTo configure the knowledge ingestion pipeline parameters, see ",(0,t.jsx)(n.a,{href:"/ingestion",children:"Docling Ingestion"}),"."]}),"\n",(0,t.jsx)(n.h3,{id:"direct-file-ingestion",children:"Direct file ingestion"}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"Knowledge Ingest"})," flow uses Langflow's ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-data#file",children:[(0,t.jsx)(n.strong,{children:"File"})," component"]})," to split and embed files loaded from your local machine into the OpenSearch database."]}),"\n",(0,t.jsxs)(n.p,{children:["The default path to your local folder is mounted from the ",(0,t.jsx)(n.code,{children:"./documents"})," folder in your OpenRAG project directory to the ",(0,t.jsx)(n.code,{children:"/app/documents/"})," directory inside the Docker container. Files added to the host or the container will be visible in both locations. To configure this location, modify the ",(0,t.jsx)(n.strong,{children:"Documents Paths"})," variable in either the TUI's ",(0,t.jsx)(n.a,{href:"/install#setup",children:"Advanced Setup"})," menu or in the ",(0,t.jsx)(n.code,{children:".env"})," used by Docker Compose."]}),"\n",(0,t.jsxs)(n.p,{children:["To load and process a single file from the mapped location, click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then click ",(0,t.jsx)(n.strong,{children:"Add File"}),".\nThe file is loaded into your OpenSearch database, and appears in the Knowledge page."]}),"\n",(0,t.jsxs)(n.p,{children:["To load and process a directory from the mapped location, click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then click ",(0,t.jsx)(n.strong,{children:"Process Folder"}),".\nThe files are loaded into your OpenSearch database, and appear in the Knowledge page."]}),"\n",(0,t.jsx)(n.h3,{id:"oauth-ingestion",children:"Ingest files through OAuth connectors"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG supports Google Drive, OneDrive, and Sharepoint as OAuth connectors for seamless document synchronization."}),"\n",(0,t.jsx)(n.p,{children:"OAuth integration allows individual users to connect their personal cloud storage accounts to OpenRAG. Each user must separately authorize OpenRAG to access their own cloud storage files. When a user connects a cloud service, they are redirected to authenticate with that service provider and grant OpenRAG permission to sync documents from their personal cloud storage."}),"\n",(0,t.jsx)(n.p,{children:"Before users can connect their cloud storage accounts, you must configure OAuth credentials in OpenRAG. 
This requires registering OpenRAG as an OAuth application with a cloud provider and obtaining client ID and secret keys for each service you want to support."}),"\n",(0,t.jsx)(n.p,{children:"To add an OAuth connector to OpenRAG, do the following.\nThis example uses Google OAuth.\nIf you wish to use another provider, add the secrets to another provider."}),"\n",(0,t.jsxs)(l.A,{groupId:"Installation type",children:[(0,t.jsx)(d.A,{value:"TUI",label:"TUI",default:!0,children:(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["If OpenRAG is running, stop it with ",(0,t.jsx)(n.strong,{children:"Status"})," > ",(0,t.jsx)(n.strong,{children:"Stop Services"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Advanced Setup"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:["Add the OAuth provider's client and secret key in the ",(0,t.jsx)(n.a,{href:"/install#setup",children:"Advanced Setup"})," menu."]}),"\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Save Configuration"}),".\nThe TUI generates a new ",(0,t.jsx)(n.code,{children:".env"})," file with your OAuth values."]}),"\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Start Container Services"}),"."]}),"\n"]})}),(0,t.jsxs)(d.A,{value:".env",label:".env",children:[(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsx)(n.li,{children:"Stop the Docker deployment."}),"\n",(0,t.jsxs)(n.li,{children:["Add the OAuth provider's client and secret key in the ",(0,t.jsx)(n.code,{children:".env"})," file for Docker Compose."]}),"\n"]}),(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"GOOGLE_OAUTH_CLIENT_ID='YOUR_OAUTH_CLIENT_ID'\nGOOGLE_OAUTH_CLIENT_SECRET='YOUR_OAUTH_CLIENT_SECRET'\n"})}),(0,t.jsxs)(n.ol,{start:"3",children:["\n",(0,t.jsxs)(n.li,{children:["Save your ",(0,t.jsx)(n.code,{children:".env"}),". 
file."]}),"\n",(0,t.jsx)(n.li,{children:"Start the Docker deployment."}),"\n"]})]})]}),"\n",(0,t.jsxs)(n.p,{children:["The OpenRAG frontend at ",(0,t.jsx)(n.code,{children:"http://localhost:3000"})," now redirects to an OAuth callback login page for your OAuth provider.\nA successful authentication opens OpenRAG with the required scopes for your connected storage."]}),"\n",(0,t.jsx)(n.p,{children:"To add knowledge from an OAuth-connected storage provider, do the following:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then select the storage provider, for example, ",(0,t.jsx)(n.strong,{children:"Google Drive"}),".\nThe ",(0,t.jsx)(n.strong,{children:"Add Cloud Knowledge"})," page opens."]}),"\n",(0,t.jsxs)(n.li,{children:["To add files or folders from the connected storage, click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Add Files"}),".\nSelect the files or folders you want and click ",(0,t.jsx)(n.strong,{children:"Select"}),".\nYou can select multiples."]}),"\n",(0,t.jsxs)(n.li,{children:["When your files are selected, click ",(0,t.jsx)(n.strong,{children:"Ingest Files"}),".\nThe ingestion process may take some time, depending on the size of your documents."]}),"\n",(0,t.jsx)(n.li,{children:"When ingestion is complete, your documents are available in the Knowledge screen."}),"\n"]}),"\n",(0,t.jsx)(n.h2,{id:"explore-knowledge",children:"Explore knowledge"}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"Knowledge"})," page lists the documents OpenRAG has ingested into the OpenSearch vector database's ",(0,t.jsx)(n.code,{children:"documents"})," index."]}),"\n",(0,t.jsxs)(n.p,{children:["To explore your current knowledge, click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"}),".\nClick on a document to display the chunks derived from splitting the default documents into the vector database."]}),"\n",(0,t.jsxs)(n.p,{children:["Documents are processed with the default ",(0,t.jsx)(n.strong,{children:"Knowledge Ingest"})," flow, so if you want to split your documents differently, edit the ",(0,t.jsx)(n.strong,{children:"Knowledge Ingest"})," flow."]}),"\n",(0,t.jsx)(c.Ay,{}),"\n",(0,t.jsx)(n.h2,{id:"create-knowledge-filters",children:"Create knowledge filters"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG includes a knowledge filter system for organizing and managing document collections.\nKnowledge filters are saved search configurations that allow you to create custom views of your document collection. 
They store search queries, filter criteria, and display settings that can be reused across different parts of OpenRAG."}),"\n",(0,t.jsx)(n.p,{children:"Knowledge filters help agents work more efficiently with large document collections by focusing their context within relevant documents sets."}),"\n",(0,t.jsx)(n.p,{children:"To create a knowledge filter, do the following:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(r.A,{name:"Funnel","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"All Knowledge"}),", and then click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Create New Filter"}),".\nThe ",(0,t.jsx)(n.strong,{children:"Create New Knowledge Filter"})," pane appears."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Enter a ",(0,t.jsx)(n.strong,{children:"Name"})," and ",(0,t.jsx)(n.strong,{children:"Description"}),", and then click ",(0,t.jsx)(r.A,{name:"Save","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Create Filter"}),".\nA new filter is created with default settings that match everything."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To modify the default filter, click ",(0,t.jsx)(r.A,{name:"Funnel","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"All Knowledge"}),", and then click your new filter to edit it in the ",(0,t.jsx)(n.strong,{children:"Knowledge Filter"})," pane."]}),"\n",(0,t.jsx)(n.p,{children:"The following filter options are configurable."}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Search Query"}),': Enter text for semantic search, such as "financial reports from Q4".']}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Data Sources"}),": Select specific data sources or folders to include."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Document Types"}),": Filter by file type."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Owners"}),": Filter by who uploaded the documents."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Sources"}),": Filter by connector types, such as local upload or Google Drive."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Result Limit"}),": Set maximum number of results. The default is ",(0,t.jsx)(n.code,{children:"10"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Score Threshold"}),": Set minimum relevance score. 
The default score is ",(0,t.jsx)(n.code,{children:"0"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["When you're done editing the filter, click ",(0,t.jsx)(r.A,{name:"Save","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To apply the filter to OpenRAG globally, click ",(0,t.jsx)(r.A,{name:"Funnel","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"All Knowledge"}),", and then select the filter to apply."]}),"\n",(0,t.jsxs)(n.p,{children:["To apply the filter to a single chat session, in the ",(0,t.jsx)(r.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Chat"})," window, click ",(0,t.jsx)(n.strong,{children:"@"}),", and then select the filter to apply."]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.h2,{id:"openrag-default-configuration",children:"OpenRAG default configuration"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG automatically detects and configures the correct vector dimensions for embedding models, ensuring optimal search performance and compatibility."}),"\n",(0,t.jsxs)(n.p,{children:["The complete list of supported models is available at ",(0,t.jsxs)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py",children:[(0,t.jsx)(n.code,{children:"models_service.py"})," in the OpenRAG repository"]}),"."]}),"\n",(0,t.jsx)(n.p,{children:"You can use custom embedding models by specifying them in your configuration."}),"\n",(0,t.jsxs)(n.p,{children:["If you use an unknown embedding model, OpenRAG will automatically fall back to ",(0,t.jsx)(n.code,{children:"1536"})," dimensions and log a warning. The system will continue to work, but search quality may be affected if the actual model dimensions differ from ",(0,t.jsx)(n.code,{children:"1536"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default embedding dimension is ",(0,t.jsx)(n.code,{children:"1536"})," and the default model is ",(0,t.jsx)(n.code,{children:"text-embedding-3-small"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["For models with known vector dimensions, see ",(0,t.jsxs)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py",children:[(0,t.jsx)(n.code,{children:"settings.py"})," in the OpenRAG repository"]}),"."]})]})}function x(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(g,{...e})}):g(e)}},3782:(e,n,s)=>{s.d(n,{Ay:()=>d,RM:()=>r});var i=s(4848),t=s(8453),o=s(1610);const r=[];function l(e){const n={a:"a",p:"p",strong:"strong",...(0,t.R)(),...e.components};return(0,i.jsxs)(n.p,{children:["All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.\nTo modify a flow, click ",(0,i.jsx)(o.A,{name:"Settings2","aria-hidden":"true"})," ",(0,i.jsx)(n.strong,{children:"Settings"}),", and click ",(0,i.jsx)(n.strong,{children:"Edit in Langflow"}),".\nOpenRAG's visual editor is based on the ",(0,i.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"}),", so you can edit your flows to match your specific use case."]})}function d(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(l,{...e})}):l(e)}}}]); \ No newline at end of file diff --git a/assets/js/0ba6a408.b13d5c3c.js b/assets/js/0ba6a408.b13d5c3c.js new file mode 100644 index 00000000..0c497e0c --- /dev/null +++ b/assets/js/0ba6a408.b13d5c3c.js @@ -0,0 +1 @@ +"use 
strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[571],{3227:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>p,contentTitle:()=>h,default:()=>x,frontMatter:()=>a,metadata:()=>i,toc:()=>u});const i=JSON.parse('{"id":"core-components/knowledge","title":"OpenSearch in OpenRAG","description":"OpenRAG uses OpenSearch for its vector-backed knowledge store.","source":"@site/docs/core-components/knowledge.mdx","sourceDirName":"core-components","slug":"/knowledge","permalink":"/knowledge","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/knowledge.mdx","tags":[],"version":"current","frontMatter":{"title":"OpenSearch in OpenRAG","slug":"/knowledge"},"sidebar":"tutorialSidebar","previous":{"title":"Langflow in OpenRAG","permalink":"/agents"},"next":{"title":"Docling in OpenRAG","permalink":"/ingestion"}}');var t=s(4848),o=s(8453),r=s(1610),l=s(1470),d=s(9365),c=s(3782);const a={title:"OpenSearch in OpenRAG",slug:"/knowledge"},h=void 0,p={},u=[{value:"Authentication and document access",id:"auth",level:2},{value:"Ingest knowledge",id:"ingest-knowledge",level:2},{value:"Direct file ingestion",id:"direct-file-ingestion",level:3},{value:"Ingest files through OAuth connectors",id:"oauth-ingestion",level:3},{value:"Explore knowledge",id:"explore-knowledge",level:2},...c.RM,{value:"Create knowledge filters",id:"create-knowledge-filters",level:2},{value:"OpenRAG default configuration",id:"openrag-default-configuration",level:2}];function g(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsxs)(n.p,{children:["OpenRAG uses ",(0,t.jsx)(n.a,{href:"https://docs.opensearch.org/latest/",children:"OpenSearch"})," for its vector-backed knowledge store.\nThis is a specialized database for storing and retrieving embeddings, which helps your Agent efficiently find relevant information.\nOpenSearch provides powerful hybrid search capabilities with enterprise-grade security and multi-tenancy support."]}),"\n",(0,t.jsx)(n.h2,{id:"auth",children:"Authentication and document access"}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG supports two authentication modes based on how you ",(0,t.jsx)(n.a,{href:"/install",children:"install OpenRAG"}),", and which mode you choose affects document access."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"No-auth mode (Basic Setup)"}),": This mode uses a single anonymous JWT token for OpenSearch authentication, so documents uploaded to the ",(0,t.jsx)(n.code,{children:"documents"})," index by one user are visible to all other users on the OpenRAG server."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"OAuth mode (Advanced Setup)"}),": Each OpenRAG user is granted a JWT token, and each document is tagged with user ownership. 
Documents are filtered by user ownership, ensuring users only see documents they uploaded or have access to."]}),"\n",(0,t.jsx)(n.h2,{id:"ingest-knowledge",children:"Ingest knowledge"}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG supports knowledge ingestion through direct file uploads and OAuth connectors.\nTo configure the knowledge ingestion pipeline parameters, see ",(0,t.jsx)(n.a,{href:"/ingestion",children:"Docling Ingestion"}),"."]}),"\n",(0,t.jsx)(n.h3,{id:"direct-file-ingestion",children:"Direct file ingestion"}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"Knowledge Ingest"})," flow uses Langflow's ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-data#file",children:[(0,t.jsx)(n.strong,{children:"File"})," component"]})," to split and embed files loaded from your local machine into the OpenSearch database."]}),"\n",(0,t.jsxs)(n.p,{children:["The default path to your local folder is mounted from the ",(0,t.jsx)(n.code,{children:"./documents"})," folder in your OpenRAG project directory to the ",(0,t.jsx)(n.code,{children:"/app/documents/"})," directory inside the Docker container. Files added to the host or the container will be visible in both locations. To configure this location, modify the ",(0,t.jsx)(n.strong,{children:"Documents Paths"})," variable in either the TUI's ",(0,t.jsx)(n.a,{href:"/install#setup",children:"Advanced Setup"})," menu or in the ",(0,t.jsx)(n.code,{children:".env"})," used by Docker Compose."]}),"\n",(0,t.jsxs)(n.p,{children:["To load and process a single file from the mapped location, click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then click ",(0,t.jsx)(r.A,{name:"File","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"File"}),".\nThe file is loaded into your OpenSearch database, and appears in the Knowledge page."]}),"\n",(0,t.jsxs)(n.p,{children:["To load and process a directory from the mapped location, click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then click ",(0,t.jsx)(r.A,{name:"Folder","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Folder"}),".\nThe files are loaded into your OpenSearch database, and appear in the Knowledge page."]}),"\n",(0,t.jsxs)(n.p,{children:["To add files directly to a chat session, click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," in the chat input and select the files you want to include. Files added this way are processed and made available to the agent for the current conversation, and are not permanently added to the knowledge base."]}),"\n",(0,t.jsx)(n.h3,{id:"oauth-ingestion",children:"Ingest files through OAuth connectors"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG supports Google Drive, OneDrive, and Sharepoint as OAuth connectors for seamless document synchronization."}),"\n",(0,t.jsx)(n.p,{children:"OAuth integration allows individual users to connect their personal cloud storage accounts to OpenRAG. Each user must separately authorize OpenRAG to access their own cloud storage files. When a user connects a cloud service, they are redirected to authenticate with that service provider and grant OpenRAG permission to sync documents from their personal cloud storage."}),"\n",(0,t.jsx)(n.p,{children:"Before users can connect their cloud storage accounts, you must configure OAuth credentials in OpenRAG. 
This requires registering OpenRAG as an OAuth application with a cloud provider and obtaining client ID and secret keys for each service you want to support."}),"\n",(0,t.jsx)(n.p,{children:"To add an OAuth connector to OpenRAG, do the following.\nThis example uses Google OAuth.\nIf you wish to use another provider, add the secrets to another provider."}),"\n",(0,t.jsxs)(l.A,{groupId:"Installation type",children:[(0,t.jsx)(d.A,{value:"TUI",label:"TUI",default:!0,children:(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["If OpenRAG is running, stop it with ",(0,t.jsx)(n.strong,{children:"Status"})," > ",(0,t.jsx)(n.strong,{children:"Stop Services"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Advanced Setup"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:["Add the OAuth provider's client and secret key in the ",(0,t.jsx)(n.a,{href:"/install#setup",children:"Advanced Setup"})," menu."]}),"\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Save Configuration"}),".\nThe TUI generates a new ",(0,t.jsx)(n.code,{children:".env"})," file with your OAuth values."]}),"\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Start Container Services"}),"."]}),"\n"]})}),(0,t.jsx)(d.A,{value:".env",label:".env",children:(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsx)(n.li,{children:"Stop the Docker deployment."}),"\n",(0,t.jsxs)(n.li,{children:["Add the OAuth provider's client and secret key in the ",(0,t.jsx)(n.code,{children:".env"})," file for Docker Compose.","\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"GOOGLE_OAUTH_CLIENT_ID='YOUR_OAUTH_CLIENT_ID'\nGOOGLE_OAUTH_CLIENT_SECRET='YOUR_OAUTH_CLIENT_SECRET'\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["Save your ",(0,t.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,t.jsx)(n.li,{children:"Start the Docker deployment."}),"\n"]})})]}),"\n",(0,t.jsxs)(n.p,{children:["The OpenRAG frontend at ",(0,t.jsx)(n.code,{children:"http://localhost:3000"})," now redirects to an OAuth callback login page for your OAuth provider.\nA successful authentication opens OpenRAG with the required scopes for your connected storage."]}),"\n",(0,t.jsx)(n.p,{children:"To add knowledge from an OAuth-connected storage provider, do the following:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["Click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then select the storage provider, for example, ",(0,t.jsx)(n.strong,{children:"Google Drive"}),".\nThe ",(0,t.jsx)(n.strong,{children:"Add Cloud Knowledge"})," page opens."]}),"\n",(0,t.jsxs)(n.li,{children:["To add files or folders from the connected storage, click ",(0,t.jsx)(n.strong,{children:"Add Files"}),".\nSelect the files or folders you want and click ",(0,t.jsx)(n.strong,{children:"Select"}),".\nYou can select multiple files."]}),"\n",(0,t.jsxs)(n.li,{children:["When your files are selected, click ",(0,t.jsx)(n.strong,{children:"Ingest Files"}),".\nThe ingestion process may take some time, depending on the size of your documents."]}),"\n",(0,t.jsx)(n.li,{children:"When ingestion is complete, your documents are available in the Knowledge screen."}),"\n"]}),"\n",(0,t.jsx)(n.h2,{id:"explore-knowledge",children:"Explore knowledge"}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"Knowledge"})," page lists the documents OpenRAG has ingested into the OpenSearch vector database's ",(0,t.jsx)(n.code,{children:"documents"})," index."]}),"\n",(0,t.jsxs)(n.p,{children:["To explore your 
current knowledge, click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"}),".\nClick on a document to display the chunks derived from splitting the default documents into the vector database."]}),"\n",(0,t.jsxs)(n.p,{children:["Documents are processed with the default ",(0,t.jsx)(n.strong,{children:"Knowledge Ingest"})," flow, so if you want to split your documents differently, edit the ",(0,t.jsx)(n.strong,{children:"Knowledge Ingest"})," flow."]}),"\n",(0,t.jsx)(c.Ay,{}),"\n",(0,t.jsx)(n.h2,{id:"create-knowledge-filters",children:"Create knowledge filters"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG includes a knowledge filter system for organizing and managing document collections.\nKnowledge filters are saved search configurations that allow you to create custom views of your document collection. They store search queries, filter criteria, and display settings that can be reused across different parts of OpenRAG."}),"\n",(0,t.jsx)(n.p,{children:"Knowledge filters help agents work more efficiently with large document collections by focusing their context within relevant documents sets."}),"\n",(0,t.jsx)(n.p,{children:"To create a knowledge filter, do the following:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Knowledge"}),", and then click ",(0,t.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge Filters"}),".\nThe ",(0,t.jsx)(n.strong,{children:"Knowledge Filter"})," pane appears."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Enter a ",(0,t.jsx)(n.strong,{children:"Name"})," and ",(0,t.jsx)(n.strong,{children:"Description"}),", and then click ",(0,t.jsx)(n.strong,{children:"Create Filter"}),".\nA new filter is created with default settings that match all documents."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To modify the filter, click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"}),", and then click your new filter to edit it in the ",(0,t.jsx)(n.strong,{children:"Knowledge Filter"})," pane."]}),"\n",(0,t.jsx)(n.p,{children:"The following filter options are configurable."}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Search Query"}),': Enter text for semantic search, such as "financial reports from Q4".']}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Data Sources"}),": Select specific data sources or folders to include."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Document Types"}),": Filter by file type."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Owners"}),": Filter by who uploaded the documents."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Connectors"}),": Filter by connector types, such as local upload or Google Drive."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Response Limit"}),": Set maximum number of results. The default is ",(0,t.jsx)(n.code,{children:"10"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Score Threshold"}),": Set minimum relevance score. 
The default score is ",(0,t.jsx)(n.code,{children:"0"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["When you're done editing the filter, click ",(0,t.jsx)(n.strong,{children:"Update Filter"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To apply the filter to OpenRAG globally, click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"}),", and then select the filter to apply. One filter can be enabled at a time."]}),"\n",(0,t.jsxs)(n.p,{children:["To apply the filter to a single chat session, in the ",(0,t.jsx)(r.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Chat"})," window, click ",(0,t.jsx)(r.A,{name:"Funnel","aria-hidden":"true"}),", and then select the filter to apply."]}),"\n",(0,t.jsxs)(n.p,{children:["To delete the filter, in the ",(0,t.jsx)(n.strong,{children:"Knowledge Filter"})," pane, click ",(0,t.jsx)(n.strong,{children:"Delete Filter"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.h2,{id:"openrag-default-configuration",children:"OpenRAG default configuration"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG automatically detects and configures the correct vector dimensions for embedding models, ensuring optimal search performance and compatibility."}),"\n",(0,t.jsxs)(n.p,{children:["The complete list of supported models is available at ",(0,t.jsxs)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py",children:[(0,t.jsx)(n.code,{children:"models_service.py"})," in the OpenRAG repository"]}),"."]}),"\n",(0,t.jsx)(n.p,{children:"You can use custom embedding models by specifying them in your configuration."}),"\n",(0,t.jsxs)(n.p,{children:["If you use an unknown embedding model, OpenRAG will automatically fall back to ",(0,t.jsx)(n.code,{children:"1536"})," dimensions and log a warning. 
The system will continue to work, but search quality may be affected if the actual model dimensions differ from ",(0,t.jsx)(n.code,{children:"1536"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default embedding dimension is ",(0,t.jsx)(n.code,{children:"1536"})," and the default model is ",(0,t.jsx)(n.code,{children:"text-embedding-3-small"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["For models with known vector dimensions, see ",(0,t.jsxs)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py",children:[(0,t.jsx)(n.code,{children:"settings.py"})," in the OpenRAG repository"]}),"."]})]})}function x(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(g,{...e})}):g(e)}},3782:(e,n,s)=>{s.d(n,{Ay:()=>d,RM:()=>r});var i=s(4848),t=s(8453),o=s(1610);const r=[];function l(e){const n={a:"a",p:"p",strong:"strong",...(0,t.R)(),...e.components};return(0,i.jsxs)(n.p,{children:["All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.\nTo modify a flow, click ",(0,i.jsx)(o.A,{name:"Settings2","aria-hidden":"true"})," ",(0,i.jsx)(n.strong,{children:"Settings"}),", and click ",(0,i.jsx)(n.strong,{children:"Edit in Langflow"}),".\nOpenRAG's visual editor is based on the ",(0,i.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"}),", so you can edit your flows to match your specific use case."]})}function d(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(l,{...e})}):l(e)}}}]); \ No newline at end of file diff --git a/assets/js/22dd74f7.383b817e.js b/assets/js/22dd74f7.383b817e.js deleted file mode 100644 index 7cbf555c..00000000 --- a/assets/js/22dd74f7.383b817e.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[1567],{5226:e=>{e.exports=JSON.parse('{"version":{"pluginId":"default","version":"current","label":"Next","banner":null,"badge":false,"noIndex":false,"className":"docs-version-current","isLast":true,"docsSidebars":{"tutorialSidebar":[{"type":"link","href":"/","label":"About OpenRAG","docId":"get-started/what-is-openrag","unlisted":false},{"type":"link","href":"/install","label":"Install OpenRAG with TUI","docId":"get-started/install","unlisted":false},{"type":"link","href":"/get-started/docker","label":"Install OpenRAG containers","docId":"get-started/docker","unlisted":false},{"type":"link","href":"/quickstart","label":"Quickstart","docId":"get-started/quickstart","unlisted":false},{"type":"link","href":"/agents","label":"Langflow in OpenRAG","docId":"core-components/agents","unlisted":false},{"type":"link","href":"/knowledge","label":"OpenSearch in OpenRAG","docId":"core-components/knowledge","unlisted":false},{"type":"link","href":"/ingestion","label":"Docling in OpenRAG","docId":"core-components/ingestion","unlisted":false},{"type":"link","href":"/reference/configuration","label":"Environment variables","docId":"reference/configuration","unlisted":false},{"type":"link","href":"/support/troubleshoot","label":"Troubleshooting","docId":"support/troubleshoot","unlisted":false}]},"docs":{"core-components/agents":{"id":"core-components/agents","title":"Langflow in OpenRAG","description":"OpenRAG leverages Langflow\'s Agent component to power the OpenRAG OpenSearch Agent flow.","sidebar":"tutorialSidebar"},"core-components/ingestion":{"id":"core-components/ingestion","title":"Docling in OpenRAG","description":"OpenRAG uses Docling 
for its document ingestion pipeline.","sidebar":"tutorialSidebar"},"core-components/knowledge":{"id":"core-components/knowledge","title":"OpenSearch in OpenRAG","description":"OpenRAG uses OpenSearch for its vector-backed knowledge store.","sidebar":"tutorialSidebar"},"get-started/docker":{"id":"get-started/docker","title":"Install OpenRAG containers","description":"OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments.","sidebar":"tutorialSidebar"},"get-started/install":{"id":"get-started/install","title":"Install OpenRAG with TUI","description":"Install the OpenRAG Python wheel, and then run the OpenRAG Terminal User Interface(TUI) to start your OpenRAG deployment with a guided setup process.","sidebar":"tutorialSidebar"},"get-started/quickstart":{"id":"get-started/quickstart","title":"Quickstart","description":"Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the OpenRAG API.","sidebar":"tutorialSidebar"},"get-started/what-is-openrag":{"id":"get-started/what-is-openrag","title":"What is OpenRAG?","description":"OpenRAG is an open-source package for building agentic RAG systems that integrates with a wide range of orchestration tools, vector databases, and LLM providers.","sidebar":"tutorialSidebar"},"reference/configuration":{"id":"reference/configuration","title":"Environment variables","description":"OpenRAG recognizes environment variables from the following sources:","sidebar":"tutorialSidebar"},"support/troubleshoot":{"id":"support/troubleshoot","title":"Troubleshooting","description":"This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG.","sidebar":"tutorialSidebar"}}}}')}}]); \ No newline at end of file diff --git a/assets/js/22dd74f7.9190bfb2.js b/assets/js/22dd74f7.9190bfb2.js new file mode 100644 index 00000000..8e7a5fce --- /dev/null +++ b/assets/js/22dd74f7.9190bfb2.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[1567],{5226:e=>{e.exports=JSON.parse('{"version":{"pluginId":"default","version":"current","label":"Next","banner":null,"badge":false,"noIndex":false,"className":"docs-version-current","isLast":true,"docsSidebars":{"tutorialSidebar":[{"type":"link","href":"/","label":"About OpenRAG","docId":"get-started/what-is-openrag","unlisted":false},{"type":"link","href":"/install","label":"Install OpenRAG with TUI","docId":"get-started/install","unlisted":false},{"type":"link","href":"/get-started/docker","label":"Install OpenRAG containers","docId":"get-started/docker","unlisted":false},{"type":"link","href":"/quickstart","label":"Quickstart","docId":"get-started/quickstart","unlisted":false},{"type":"link","href":"/agents","label":"Langflow in OpenRAG","docId":"core-components/agents","unlisted":false},{"type":"link","href":"/knowledge","label":"OpenSearch in OpenRAG","docId":"core-components/knowledge","unlisted":false},{"type":"link","href":"/ingestion","label":"Docling in OpenRAG","docId":"core-components/ingestion","unlisted":false},{"type":"link","href":"/reference/configuration","label":"Environment variables","docId":"reference/configuration","unlisted":false},{"type":"link","href":"/support/troubleshoot","label":"Troubleshooting","docId":"support/troubleshoot","unlisted":false}]},"docs":{"core-components/agents":{"id":"core-components/agents","title":"Langflow in OpenRAG","description":"OpenRAG 
leverages Langflow\'s Agent component to power the OpenRAG OpenSearch Agent flow.","sidebar":"tutorialSidebar"},"core-components/ingestion":{"id":"core-components/ingestion","title":"Docling in OpenRAG","description":"OpenRAG uses Docling for document ingestion.","sidebar":"tutorialSidebar"},"core-components/knowledge":{"id":"core-components/knowledge","title":"OpenSearch in OpenRAG","description":"OpenRAG uses OpenSearch for its vector-backed knowledge store.","sidebar":"tutorialSidebar"},"get-started/docker":{"id":"get-started/docker","title":"Install OpenRAG containers","description":"OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments.","sidebar":"tutorialSidebar"},"get-started/install":{"id":"get-started/install","title":"Install OpenRAG with TUI","description":"Install the OpenRAG Python wheel, and then run the OpenRAG Terminal User Interface(TUI) to start your OpenRAG deployment with a guided setup process.","sidebar":"tutorialSidebar"},"get-started/quickstart":{"id":"get-started/quickstart","title":"Quickstart","description":"Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the Langflow API.","sidebar":"tutorialSidebar"},"get-started/what-is-openrag":{"id":"get-started/what-is-openrag","title":"What is OpenRAG?","description":"OpenRAG is an open-source package for building agentic RAG systems that integrates with a wide range of orchestration tools, vector databases, and LLM providers.","sidebar":"tutorialSidebar"},"reference/configuration":{"id":"reference/configuration","title":"Environment variables","description":"OpenRAG recognizes environment variables from the following sources:","sidebar":"tutorialSidebar"},"support/troubleshoot":{"id":"support/troubleshoot","title":"Troubleshooting","description":"This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG.","sidebar":"tutorialSidebar"}}}}')}}]); \ No newline at end of file diff --git a/assets/js/33362219.d707ff03.js b/assets/js/33362219.667bb0cb.js similarity index 84% rename from assets/js/33362219.d707ff03.js rename to assets/js/33362219.667bb0cb.js index a186fa8f..5d9dd8a5 100644 --- a/assets/js/33362219.d707ff03.js +++ b/assets/js/33362219.667bb0cb.js @@ -1 +1 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[9532],{3782:(e,n,t)=>{t.d(n,{Ay:()=>c,RM:()=>i});var o=t(4848),s=t(8453),r=t(1610);const i=[];function a(e){const n={a:"a",p:"p",strong:"strong",...(0,s.R)(),...e.components};return(0,o.jsxs)(n.p,{children:["All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.\nTo modify a flow, click ",(0,o.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Settings"}),", and click ",(0,o.jsx)(n.strong,{children:"Edit in Langflow"}),".\nOpenRAG's visual editor is based on the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"}),", so you can edit your flows to match your specific use case."]})}function c(e={}){const{wrapper:n}={...(0,s.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(a,{...e})}):a(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>o});const 
o=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"},8748:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>h,contentTitle:()=>l,default:()=>g,frontMatter:()=>c,metadata:()=>o,toc:()=>d});const o=JSON.parse('{"id":"core-components/agents","title":"Langflow in OpenRAG","description":"OpenRAG leverages Langflow\'s Agent component to power the OpenRAG OpenSearch Agent flow.","source":"@site/docs/core-components/agents.mdx","sourceDirName":"core-components","slug":"/agents","permalink":"/agents","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/agents.mdx","tags":[],"version":"current","frontMatter":{"title":"Langflow in OpenRAG","slug":"/agents"},"sidebar":"tutorialSidebar","previous":{"title":"Quickstart","permalink":"/quickstart"},"next":{"title":"OpenSearch in OpenRAG","permalink":"/knowledge"}}');var s=t(4848),r=t(8453),i=t(1610),a=(t(1470),t(9365),t(3782));const c={title:"Langflow in OpenRAG",slug:"/agents"},l=void 0,h={},d=[{value:"Use the OpenRAG OpenSearch Agent flow",id:"flow",level:2},...a.RM,{value:"Additional Langflow functionality",id:"additional-langflow-functionality",level:2}];function p(e){const n={a:"a",code:"code",h2:"h2",img:"img",li:"li",p:"p",strong:"strong",ul:"ul",...(0,r.R)(),...e.components},{Details:o}=n;return o||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"OpenRAG leverages Langflow's Agent component to power the OpenRAG OpenSearch Agent flow."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Flows"})," in Langflow are functional representations of application workflows, with multiple ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/concepts-components",children:"component"})," nodes connected as single steps in a workflow."]}),"\n",(0,s.jsxs)(n.p,{children:["In the OpenRAG OpenSearch Agent flow, components like the Langflow ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,s.jsx)(n.strong,{children:"Agent"})," component"]})," and ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,s.jsx)(n.strong,{children:"OpenSearch"})," component"]})," are connected to intelligently chat with your knowledge by embedding your query, comparing it the vector database embeddings, and generating a response with the LLM."]}),"\n",(0,s.jsx)(n.p,{children:(0,s.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})}),"\n",(0,s.jsx)(n.p,{children:"The Agent component shines here in its ability to make decisions on not only what query should be sent, but when a query is necessary to solve the problem at hand."}),"\n",(0,s.jsxs)(o,{closed:!0,children:[(0,s.jsx)("summary",{children:"How do agents work?"}),(0,s.jsx)(n.p,{children:"Agents extend Large Language Models (LLMs) by integrating tools, which are functions that provide additional context and enable autonomous task execution. These integrations make agents more specialized and powerful than standalone LLMs."}),(0,s.jsx)(n.p,{children:"Whereas an LLM might generate acceptable, inert responses to general queries and tasks, an agent can leverage the integrated context and tools to provide more relevant responses and even take action. 
For example, you might create an agent that can access your company's documentation, repositories, and other resources to help your team with tasks that require knowledge of your specific products, customers, and code."}),(0,s.jsx)(n.p,{children:"Agents use LLMs as a reasoning engine to process input, determine which actions to take to address the query, and then generate a response. The response could be a typical text-based LLM response, or it could involve an action, like editing a file, running a script, or calling an external API."}),(0,s.jsx)(n.p,{children:"In an agentic context, tools are functions that the agent can run to perform tasks or access external resources. A function is wrapped as a Tool object with a common interface that the agent understands. Agents become aware of tools through tool registration, which is when the agent is provided a list of available tools typically at agent initialization. The Tool object's description tells the agent what the tool can do so that it can decide whether the tool is appropriate for a given request."})]}),"\n",(0,s.jsx)(n.h2,{id:"flow",children:"Use the OpenRAG OpenSearch Agent flow"}),"\n",(0,s.jsxs)(n.p,{children:["If you've chatted with your knowledge in OpenRAG, you've already experienced the OpenRAG OpenSearch Agent chat flow.\nTo switch OpenRAG over to the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"})," and view the OpenRAG OpenSearch Agentflow, click ",(0,s.jsx)(i.A,{name:"Settings2","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", and then click ",(0,s.jsx)(n.strong,{children:"Edit in Langflow"}),".\nThis flow contains eight components connected together to chat with your data:"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,s.jsx)(n.strong,{children:"Agent"})," component"]})," orchestrates the entire flow by deciding when to search the knowledge base, how to formulate search queries, and how to combine retrieved information with the user's question to generate a comprehensive response.\nThe ",(0,s.jsx)(n.strong,{children:"Agent"})," behaves according to the prompt in the ",(0,s.jsx)(n.strong,{children:"Agent Instructions"})," field."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,s.jsx)(n.strong,{children:"Chat Input"})," component"]})," is connected to the Agent component's Input port. This allows to flow to be triggered by an incoming prompt from a user or application."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,s.jsx)(n.strong,{children:"OpenSearch"})," component"]})," is connected to the Agent component's Tools port. The agent may not use this database for every request; the agent only uses this connection if it decides the knowledge can help respond to the prompt."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-models",children:[(0,s.jsx)(n.strong,{children:"Language Model"})," component"]})," is connected to the Agent component's Language Model port. 
The agent uses the connected LLM to reason through the request sent through Chat Input."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,s.jsx)(n.strong,{children:"Embedding Model"})," component"]})," is connected to the OpenSearch component's Embedding port. This component converts text queries into vector representations that are compared with document embeddings stored in OpenSearch for semantic similarity matching. This gives your Agent's queries context."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,s.jsx)(n.strong,{children:"Text Input"})," component"]})," is populated with the global variable ",(0,s.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"}),".\nThis filter is the ",(0,s.jsx)(n.a,{href:"/knowledge#create-knowledge-filters",children:"Knowledge filter"}),", and filters which knowledge sources to search through."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsx)(n.strong,{children:"Agent"})," component's Output port is connected to the ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,s.jsx)(n.strong,{children:"Chat Output"})," component"]}),", which returns the final response to the user or application."]}),"\n",(0,s.jsxs)(n.li,{children:["An ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/mcp-client",children:[(0,s.jsx)(n.strong,{children:"MCP Tools"})," component"]})," is connected to the Agent's ",(0,s.jsx)(n.strong,{children:"Tools"})," port. This component calls the ",(0,s.jsx)(n.a,{href:"/ingestion#url-flow",children:"OpenSearch URL Ingestion flow"}),", which Langflow uses as an MCP server to fetch content from URLs and store in OpenSearch."]}),"\n"]}),"\n",(0,s.jsx)(a.Ay,{}),"\n",(0,s.jsxs)(n.p,{children:["For an example of changing out the agent's language model in OpenRAG, see the ",(0,s.jsx)(n.a,{href:"/quickstart#change-components",children:"Quickstart"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["To restore the flow to its initial state, in OpenRAG, click ",(0,s.jsx)(i.A,{name:"Settings","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", and then click ",(0,s.jsx)(n.strong,{children:"Restore Flow"}),".\nOpenRAG warns you that this discards all custom settings. Click ",(0,s.jsx)(n.strong,{children:"Restore"})," to restore the flow."]}),"\n",(0,s.jsx)(n.h2,{id:"additional-langflow-functionality",children:"Additional Langflow functionality"}),"\n",(0,s.jsx)(n.p,{children:"Langflow includes features beyond Agents to help you integrate OpenRAG into your application, and all Langflow features are included in OpenRAG."}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Langflow can serve your flows as an ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/mcp-server",children:"MCP server"}),", or consume other MCP servers as an ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/mcp-client",children:"MCP client"}),". 
Get started with the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/mcp-tutorial",children:"MCP tutorial"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["If you don't see the component you need, extend Langflow's functionality by creating ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/components-custom-components",children:"custom Python components"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Langflow offers component ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/components-bundle-components",children:"bundles"})," to integrate with many popular vector stores, AI/ML providers, and search APIs."]}),"\n"]}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(p,{...e})}):p(e)}}}]); \ No newline at end of file +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[9532],{3782:(e,n,t)=>{t.d(n,{Ay:()=>c,RM:()=>i});var o=t(4848),s=t(8453),r=t(1610);const i=[];function a(e){const n={a:"a",p:"p",strong:"strong",...(0,s.R)(),...e.components};return(0,o.jsxs)(n.p,{children:["All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.\nTo modify a flow, click ",(0,o.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Settings"}),", and click ",(0,o.jsx)(n.strong,{children:"Edit in Langflow"}),".\nOpenRAG's visual editor is based on the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"}),", so you can edit your flows to match your specific use case."]})}function c(e={}){const{wrapper:n}={...(0,s.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(a,{...e})}):a(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>o});const o=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"},8748:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>h,contentTitle:()=>l,default:()=>g,frontMatter:()=>c,metadata:()=>o,toc:()=>d});const o=JSON.parse('{"id":"core-components/agents","title":"Langflow in OpenRAG","description":"OpenRAG leverages Langflow\'s Agent component to power the OpenRAG OpenSearch Agent flow.","source":"@site/docs/core-components/agents.mdx","sourceDirName":"core-components","slug":"/agents","permalink":"/agents","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/agents.mdx","tags":[],"version":"current","frontMatter":{"title":"Langflow in OpenRAG","slug":"/agents"},"sidebar":"tutorialSidebar","previous":{"title":"Quickstart","permalink":"/quickstart"},"next":{"title":"OpenSearch in OpenRAG","permalink":"/knowledge"}}');var s=t(4848),r=t(8453),i=t(1610),a=(t(1470),t(9365),t(3782));const c={title:"Langflow in OpenRAG",slug:"/agents"},l=void 0,h={},d=[{value:"Use the OpenRAG OpenSearch Agent flow",id:"flow",level:2},...a.RM,{value:"Additional Langflow functionality",id:"additional-langflow-functionality",level:2}];function p(e){const n={a:"a",code:"code",h2:"h2",img:"img",li:"li",p:"p",strong:"strong",ul:"ul",...(0,r.R)(),...e.components},{Details:o}=n;return o||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"OpenRAG leverages Langflow's Agent component to power the OpenRAG OpenSearch Agent 
flow."}),"\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Flows"})," in Langflow are functional representations of application workflows, with multiple ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/concepts-components",children:"component"})," nodes connected as single steps in a workflow."]}),"\n",(0,s.jsxs)(n.p,{children:["In the OpenRAG OpenSearch Agent flow, components like the Langflow ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,s.jsx)(n.strong,{children:"Agent"})," component"]})," and ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,s.jsx)(n.strong,{children:"OpenSearch"})," component"]})," are connected to intelligently chat with your knowledge by embedding your query, comparing it the vector database embeddings, and generating a response with the LLM."]}),"\n",(0,s.jsx)(n.p,{children:(0,s.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})}),"\n",(0,s.jsx)(n.p,{children:"The Agent component shines here in its ability to make decisions on not only what query should be sent, but when a query is necessary to solve the problem at hand."}),"\n",(0,s.jsxs)(o,{closed:!0,children:[(0,s.jsx)("summary",{children:"How do agents work?"}),(0,s.jsx)(n.p,{children:"Agents extend Large Language Models (LLMs) by integrating tools, which are functions that provide additional context and enable autonomous task execution. These integrations make agents more specialized and powerful than standalone LLMs."}),(0,s.jsx)(n.p,{children:"Whereas an LLM might generate acceptable, inert responses to general queries and tasks, an agent can leverage the integrated context and tools to provide more relevant responses and even take action. For example, you might create an agent that can access your company's documentation, repositories, and other resources to help your team with tasks that require knowledge of your specific products, customers, and code."}),(0,s.jsx)(n.p,{children:"Agents use LLMs as a reasoning engine to process input, determine which actions to take to address the query, and then generate a response. The response could be a typical text-based LLM response, or it could involve an action, like editing a file, running a script, or calling an external API."}),(0,s.jsx)(n.p,{children:"In an agentic context, tools are functions that the agent can run to perform tasks or access external resources. A function is wrapped as a Tool object with a common interface that the agent understands. Agents become aware of tools through tool registration, which is when the agent is provided a list of available tools typically at agent initialization. 
The Tool object's description tells the agent what the tool can do so that it can decide whether the tool is appropriate for a given request."})]}),"\n",(0,s.jsx)(n.h2,{id:"flow",children:"Use the OpenRAG OpenSearch Agent flow"}),"\n",(0,s.jsxs)(n.p,{children:["If you've chatted with your knowledge in OpenRAG, you've already experienced the OpenRAG OpenSearch Agent chat flow.\nTo switch OpenRAG over to the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"})," and view the OpenRAG OpenSearch Agentflow, click ",(0,s.jsx)(i.A,{name:"Settings2","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", and then click ",(0,s.jsx)(n.strong,{children:"Edit in Langflow"}),".\nThis flow contains eight components connected together to chat with your data:"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,s.jsx)(n.strong,{children:"Agent"})," component"]})," orchestrates the entire flow by deciding when to search the knowledge base, how to formulate search queries, and how to combine retrieved information with the user's question to generate a comprehensive response.\nThe ",(0,s.jsx)(n.strong,{children:"Agent"})," behaves according to the prompt in the ",(0,s.jsx)(n.strong,{children:"Agent Instructions"})," field."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,s.jsx)(n.strong,{children:"Chat Input"})," component"]})," is connected to the Agent component's Input port. This allows to flow to be triggered by an incoming prompt from a user or application."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,s.jsx)(n.strong,{children:"OpenSearch"})," component"]})," is connected to the Agent component's Tools port. The agent may not use this database for every request; the agent only uses this connection if it decides the knowledge can help respond to the prompt."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-models",children:[(0,s.jsx)(n.strong,{children:"Language Model"})," component"]})," is connected to the Agent component's Language Model port. The agent uses the connected LLM to reason through the request sent through Chat Input."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,s.jsx)(n.strong,{children:"Embedding Model"})," component"]})," is connected to the OpenSearch component's Embedding port. This component converts text queries into vector representations that are compared with document embeddings stored in OpenSearch for semantic similarity matching. 
This gives your Agent's queries context."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,s.jsx)(n.strong,{children:"Text Input"})," component"]})," is populated with the global variable ",(0,s.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"}),".\nThis filter is the ",(0,s.jsx)(n.a,{href:"/knowledge#create-knowledge-filters",children:"Knowledge filter"}),", and filters which knowledge sources to search through."]}),"\n",(0,s.jsxs)(n.li,{children:["The ",(0,s.jsx)(n.strong,{children:"Agent"})," component's Output port is connected to the ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,s.jsx)(n.strong,{children:"Chat Output"})," component"]}),", which returns the final response to the user or application."]}),"\n",(0,s.jsxs)(n.li,{children:["An ",(0,s.jsxs)(n.a,{href:"https://docs.langflow.org/mcp-client",children:[(0,s.jsx)(n.strong,{children:"MCP Tools"})," component"]})," is connected to the Agent's ",(0,s.jsx)(n.strong,{children:"Tools"})," port. This component calls the ",(0,s.jsx)(n.a,{href:"/ingestion#url-flow",children:"OpenSearch URL Ingestion flow"}),", which Langflow uses as an MCP server to fetch content from URLs and store in OpenSearch."]}),"\n"]}),"\n",(0,s.jsx)(a.Ay,{}),"\n",(0,s.jsxs)(n.p,{children:["For an example of changing out the agent's language model in OpenRAG, see the ",(0,s.jsx)(n.a,{href:"/quickstart#change-components",children:"Quickstart"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["To restore the flow to its initial state, in OpenRAG, click ",(0,s.jsx)(i.A,{name:"Settings2","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", and then click ",(0,s.jsx)(n.strong,{children:"Restore Flow"}),".\nOpenRAG warns you that this discards all custom settings. Click ",(0,s.jsx)(n.strong,{children:"Restore"})," to restore the flow."]}),"\n",(0,s.jsx)(n.h2,{id:"additional-langflow-functionality",children:"Additional Langflow functionality"}),"\n",(0,s.jsx)(n.p,{children:"Langflow includes features beyond Agents to help you integrate OpenRAG into your application, and all Langflow features are included in OpenRAG."}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Langflow can serve your flows as an ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/mcp-server",children:"MCP server"}),", or consume other MCP servers as an ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/mcp-client",children:"MCP client"}),". 
Get started with the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/mcp-tutorial",children:"MCP tutorial"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["If you don't see the component you need, extend Langflow's functionality by creating ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/components-custom-components",children:"custom Python components"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Langflow offers component ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/components-bundle-components",children:"bundles"})," to integrate with many popular vector stores, AI/ML providers, and search APIs."]}),"\n"]}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(p,{...e})}):p(e)}}}]); \ No newline at end of file diff --git a/assets/js/749371cc.8584455d.js b/assets/js/749371cc.8584455d.js new file mode 100644 index 00000000..2d1e0793 --- /dev/null +++ b/assets/js/749371cc.8584455d.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[2272],{887:(e,n,s)=>{s.d(n,{Ay:()=>c,RM:()=>i});var r=s(4848),l=s(8453),o=s(1470),t=s(9365);const i=[{value:"Application onboarding",id:"application-onboarding",level:2}];function a(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,l.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,r.jsxs)(n.p,{children:["The first time you start OpenRAG, whether using the TUI or a ",(0,r.jsx)(n.code,{children:".env"})," file, it's recommended that you complete application onboarding."]}),"\n",(0,r.jsxs)(n.p,{children:["To skip onboarding, click ",(0,r.jsx)(n.strong,{children:"Skip onboarding"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["Values from onboarding can be changed later in the OpenRAG ",(0,r.jsx)(n.strong,{children:"Settings"})," page."]}),"\n",(0,r.jsx)(n.p,{children:"Choose one LLM provider and complete only those steps:"}),"\n",(0,r.jsxs)(o.A,{groupId:"Provider",children:[(0,r.jsx)(t.A,{value:"OpenAI",label:"OpenAI",default:!0,children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsx)(t.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Complete the fields for ",(0,r.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", 
",(0,r.jsx)(n.strong,{children:"IBM Project ID"}),", and ",(0,r.jsx)(n.strong,{children:"IBM API key"}),".\nThese values are found in your IBM watsonx deployment."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsxs)(t.A,{value:"Ollama",label:"Ollama",children:[(0,r.jsx)(n.admonition,{type:"tip",children:(0,r.jsxs)(n.p,{children:["Ollama is not included with OpenRAG. To install Ollama, see the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG automatically transforms ",(0,r.jsx)(n.code,{children:"localhost"})," to access services outside of the container, and sends a test connection to your Ollama server to confirm connectivity."]}),"\n",(0,r.jsxs)(n.li,{children:["Select the ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.\nOpenRAG retrieves the available models from your Ollama server."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function c(e={}){const{wrapper:n}={...(0,l.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(a,{...e})}):a(e)}},5788:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>h,contentTitle:()=>d,default:()=>m,frontMatter:()=>c,metadata:()=>r,toc:()=>p});const r=JSON.parse('{"id":"get-started/docker","title":"Install OpenRAG containers","description":"OpenRAG has two Docker Compose files. 
Both files deploy the same applications and containers locally, but they are for different environments.","source":"@site/docs/get-started/docker.mdx","sourceDirName":"get-started","slug":"/get-started/docker","permalink":"/get-started/docker","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/docker.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG containers","slug":"/get-started/docker"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG with TUI","permalink":"/install"},"next":{"title":"Quickstart","permalink":"/quickstart"}}');var l=s(4848),o=s(8453),t=s(1470),i=s(9365),a=s(887);const c={title:"Install OpenRAG containers",slug:"/get-started/docker"},d=void 0,h={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Install OpenRAG with Docker Compose",id:"install-openrag-with-docker-compose",level:2},...a.RM,{value:"Container management commands",id:"container-management-commands",level:2},{value:"Upgrade containers",id:"upgrade-containers",level:3},{value:"Rebuild containers (destructive)",id:"rebuild-containers-destructive",level:3},{value:"Remove all containers and data (destructive)",id:"remove-all-containers-and-data-destructive",level:3}];function x(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",table:"table",tbody:"tbody",td:"td",th:"th",thead:"thead",tr:"tr",ul:"ul",...(0,o.R)(),...e.components};return(0,l.jsxs)(l.Fragment,{children:[(0,l.jsx)(n.p,{children:"OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments."}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml",children:(0,l.jsx)(n.code,{children:"docker-compose.yml"})})," is an OpenRAG deployment with GPU support for accelerated AI processing. This Docker Compose file requires an NVIDIA GPU with ",(0,l.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support."]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml",children:(0,l.jsx)(n.code,{children:"docker-compose-cpu.yml"})})," is a CPU-only version of OpenRAG for systems without NVIDIA GPU support. Use this Docker Compose file for environments where GPU drivers aren't available."]}),"\n"]}),"\n"]}),"\n",(0,l.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python Version 3.10 to 3.13"})]}),"\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"})]}),"\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,l.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"})]}),"\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". 
If using Podman, use ",(0,l.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:"podman-compose"})," or alias Docker compose commands to Podman commands."]}),"\n",(0,l.jsxs)(n.li,{children:["Create an ",(0,l.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),". This key is ",(0,l.jsx)(n.strong,{children:"required"})," to start OpenRAG, but you can choose a different model provider during ",(0,l.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n",(0,l.jsxs)(n.li,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,l.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment."]}),"\n"]}),"\n",(0,l.jsx)(n.h2,{id:"install-openrag-with-docker-compose",children:"Install OpenRAG with Docker Compose"}),"\n",(0,l.jsx)(n.p,{children:"To install OpenRAG with Docker Compose, do the following:"}),"\n",(0,l.jsxs)(n.ol,{children:["\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Clone the OpenRAG repository."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"git clone https://github.com/langflow-ai/openrag.git\ncd openrag\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Install dependencies."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Copy the example ",(0,l.jsx)(n.code,{children:".env"})," file included in the repository root.\nThe example file includes all environment variables with comments to guide you in finding and setting their values."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"cp .env.example .env\n"})}),"\n",(0,l.jsxs)(n.p,{children:["Alternatively, create a new ",(0,l.jsx)(n.code,{children:".env"})," file in the repository root."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{children:"touch .env\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"The Docker Compose files are populated with the values from your .env. 
The following values must be set:"}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"OPENSEARCH_PASSWORD=your_secure_password\nOPENAI_API_KEY=your_openai_api_key\nLANGFLOW_SUPERUSER=admin\nLANGFLOW_SUPERUSER_PASSWORD=your_langflow_password\nLANGFLOW_SECRET_KEY=your_secret_key\n"})}),"\n",(0,l.jsxs)(n.p,{children:["For more information on configuring OpenRAG with environment variables, see ",(0,l.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Start ",(0,l.jsx)(n.code,{children:"docling serve"})," on the host machine.\nOpenRAG Docker installations require that ",(0,l.jsx)(n.code,{children:"docling serve"})," is running on port 5001 on the host machine.\nThis enables ",(0,l.jsx)(n.a,{href:"https://opensource.apple.com/projects/mlx/",children:"Mac MLX"})," support for document processing."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py start --port 5001\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Confirm ",(0,l.jsx)(n.code,{children:"docling serve"})," is running."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{children:"uv run python scripts/docling_ctl.py status\n"})}),"\n",(0,l.jsxs)(n.p,{children:["Make sure the response shows that ",(0,l.jsx)(n.code,{children:"docling serve"})," is running, for example:"]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"Status: running\nEndpoint: http://127.0.0.1:5001\nDocs: http://127.0.0.1:5001/docs\nPID: 27746\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Deploy OpenRAG locally with Docker Compose based on your deployment type."}),"\n",(0,l.jsxs)(t.A,{groupId:"Compose file",children:[(0,l.jsx)(i.A,{value:"docker-compose.yml",label:"docker-compose.yml",default:!0,children:(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose build\ndocker compose up -d\n"})})}),(0,l.jsx)(i.A,{value:"docker-compose-cpu.yml",label:"docker-compose-cpu.yml",children:(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose -f docker-compose-cpu.yml up -d\n"})})})]}),"\n",(0,l.jsx)(n.p,{children:"The OpenRAG Docker Compose file starts five containers:"}),"\n",(0,l.jsxs)(n.table,{children:[(0,l.jsx)(n.thead,{children:(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.th,{children:"Container Name"}),(0,l.jsx)(n.th,{children:"Default Address"}),(0,l.jsx)(n.th,{children:"Purpose"})]})}),(0,l.jsxs)(n.tbody,{children:[(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenRAG Backend"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})}),(0,l.jsx)(n.td,{children:"FastAPI server and core functionality."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenRAG Frontend"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})}),(0,l.jsx)(n.td,{children:"React web interface for users."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"Langflow"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})}),(0,l.jsx)(n.td,{children:"AI workflow engine and flow 
management."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenSearch"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:9200",children:"http://localhost:9200"})}),(0,l.jsx)(n.td,{children:"Vector database for document storage."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenSearch Dashboards"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:5601",children:"http://localhost:5601"})}),(0,l.jsx)(n.td,{children:"Database administration interface."})]})]})]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Verify installation by confirming all services are running."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose ps\n"})}),"\n",(0,l.jsx)(n.p,{children:"You can now access OpenRAG at the following endpoints:"}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.strong,{children:"Frontend"}),": ",(0,l.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.strong,{children:"Backend API"}),": ",(0,l.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.strong,{children:"Langflow"}),": ",(0,l.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})]}),"\n"]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Continue with ",(0,l.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n"]}),"\n"]}),"\n",(0,l.jsxs)(n.p,{children:["To stop ",(0,l.jsx)(n.code,{children:"docling serve"})," when you're done with your OpenRAG deployment, run:"]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py stop\n"})}),"\n",(0,l.jsx)(a.Ay,{}),"\n",(0,l.jsx)(n.h2,{id:"container-management-commands",children:"Container management commands"}),"\n",(0,l.jsxs)(n.p,{children:["Manage your OpenRAG containers with the following commands.\nThese commands are also available in the TUI's ",(0,l.jsx)(n.a,{href:"/install#status",children:"Status menu"}),"."]}),"\n",(0,l.jsx)(n.h3,{id:"upgrade-containers",children:"Upgrade containers"}),"\n",(0,l.jsx)(n.p,{children:"Upgrade your containers to the latest version while preserving your data."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose pull\ndocker compose up -d --force-recreate\n"})}),"\n",(0,l.jsx)(n.h3,{id:"rebuild-containers-destructive",children:"Rebuild containers (destructive)"}),"\n",(0,l.jsxs)(n.p,{children:["Reset state by rebuilding all of your containers.\nYour OpenSearch and Langflow databases will be lost.\nDocuments stored in the ",(0,l.jsx)(n.code,{children:"./documents"})," directory will persist, since the directory is mounted as a volume in the OpenRAG backend container."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose up --build --force-recreate --remove-orphans\n"})}),"\n",(0,l.jsx)(n.h3,{id:"remove-all-containers-and-data-destructive",children:"Remove all containers and data (destructive)"}),"\n",(0,l.jsx)(n.p,{children:"Completely remove your OpenRAG installation and delete all data.\nThis deletes all of your data, including OpenSearch data, uploaded documents, and authentication."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose down --volumes --remove-orphans --rmi 
local\ndocker system prune -f\n"})})]})}function m(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,l.jsx)(n,{...e,children:(0,l.jsx)(x,{...e})}):x(e)}}}]); \ No newline at end of file diff --git a/assets/js/749371cc.9415889d.js b/assets/js/749371cc.9415889d.js deleted file mode 100644 index 5dd05b2a..00000000 --- a/assets/js/749371cc.9415889d.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[2272],{887:(e,n,s)=>{s.d(n,{Ay:()=>c,RM:()=>i});var r=s(4848),l=s(8453),o=s(1470),t=s(9365);const i=[{value:"Application onboarding",id:"application-onboarding",level:2}];function a(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,l.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,r.jsxs)(n.p,{children:["The first time you start OpenRAG, whether using the TUI or a ",(0,r.jsx)(n.code,{children:".env"})," file, you must complete application onboarding."]}),"\n",(0,r.jsxs)(n.p,{children:["Values from onboarding can be changed later in the OpenRAG ",(0,r.jsx)(n.strong,{children:"Settings"})," page."]}),"\n",(0,r.jsx)(n.p,{children:"Choose one LLM provider and complete only those steps:"}),"\n",(0,r.jsxs)(o.A,{groupId:"Provider",children:[(0,r.jsx)(t.A,{value:"OpenAI",label:"OpenAI",default:!0,children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsx)(t.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Complete the fields for ",(0,r.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", ",(0,r.jsx)(n.strong,{children:"IBM API key"}),", and ",(0,r.jsx)(n.strong,{children:"IBM Project ID"}),".\nThese values are found in your IBM watsonx deployment."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsxs)(t.A,{value:"Ollama",label:"Ollama",children:[(0,r.jsx)(n.admonition,{type:"tip",children:(0,r.jsxs)(n.p,{children:["Ollama is not included with OpenRAG. 
To install Ollama, see the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG automatically transforms ",(0,r.jsx)(n.code,{children:"localhost"})," to access services outside of the container, and sends a test connection to your Ollama server to confirm connectivity."]}),"\n",(0,r.jsxs)(n.li,{children:["Select the ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.\nOpenRAG retrieves the available models from your Ollama server."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function c(e={}){const{wrapper:n}={...(0,l.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(a,{...e})}):a(e)}},5788:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>h,contentTitle:()=>d,default:()=>m,frontMatter:()=>c,metadata:()=>r,toc:()=>p});const r=JSON.parse('{"id":"get-started/docker","title":"Install OpenRAG containers","description":"OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments.","source":"@site/docs/get-started/docker.mdx","sourceDirName":"get-started","slug":"/get-started/docker","permalink":"/get-started/docker","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/docker.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG containers","slug":"/get-started/docker"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG with TUI","permalink":"/install"},"next":{"title":"Quickstart","permalink":"/quickstart"}}');var l=s(4848),o=s(8453),t=s(1470),i=s(9365),a=s(887);const c={title:"Install OpenRAG containers",slug:"/get-started/docker"},d=void 0,h={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Install OpenRAG with Docker Compose",id:"install-openrag-with-docker-compose",level:2},...a.RM,{value:"Container management commands",id:"container-management-commands",level:2},{value:"Upgrade containers",id:"upgrade-containers",level:3},{value:"Rebuild containers (destructive)",id:"rebuild-containers-destructive",level:3},{value:"Remove all containers and data (destructive)",id:"remove-all-containers-and-data-destructive",level:3}];function x(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",table:"table",tbody:"tbody",td:"td",th:"th",thead:"thead",tr:"tr",ul:"ul",...(0,o.R)(),...e.components};return(0,l.jsxs)(l.Fragment,{children:[(0,l.jsx)(n.p,{children:"OpenRAG has two Docker Compose files. 
Both files deploy the same applications and containers locally, but they are for different environments."}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml",children:(0,l.jsx)(n.code,{children:"docker-compose.yml"})})," is an OpenRAG deployment with GPU support for accelerated AI processing. This Docker Compose file requires an NVIDIA GPU with ",(0,l.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support."]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:[(0,l.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml",children:(0,l.jsx)(n.code,{children:"docker-compose-cpu.yml"})})," is a CPU-only version of OpenRAG for systems without NVIDIA GPU support. Use this Docker Compose file for environments where GPU drivers aren't available."]}),"\n"]}),"\n"]}),"\n",(0,l.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python Version 3.10 to 3.13"})]}),"\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"})]}),"\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,l.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"})]}),"\n",(0,l.jsxs)(n.li,{children:["Install ",(0,l.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". If using Podman, use ",(0,l.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:"podman-compose"})," or alias Docker compose commands to Podman commands."]}),"\n",(0,l.jsxs)(n.li,{children:["Create an ",(0,l.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),". This key is ",(0,l.jsx)(n.strong,{children:"required"})," to start OpenRAG, but you can choose a different model provider during ",(0,l.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n",(0,l.jsxs)(n.li,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,l.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. 
If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment."]}),"\n"]}),"\n",(0,l.jsx)(n.h2,{id:"install-openrag-with-docker-compose",children:"Install OpenRAG with Docker Compose"}),"\n",(0,l.jsx)(n.p,{children:"To install OpenRAG with Docker Compose, do the following:"}),"\n",(0,l.jsxs)(n.ol,{children:["\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Clone the OpenRAG repository."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"git clone https://github.com/langflow-ai/openrag.git\ncd openrag\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Install dependencies."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Copy the example ",(0,l.jsx)(n.code,{children:".env"})," file included in the repository root.\nThe example file includes all environment variables with comments to guide you in finding and setting their values."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"cp .env.example .env\n"})}),"\n",(0,l.jsxs)(n.p,{children:["Alternatively, create a new ",(0,l.jsx)(n.code,{children:".env"})," file in the repository root."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{children:"touch .env\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"The Docker Compose files are populated with the values from your .env. The following values must be set:"}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"OPENSEARCH_PASSWORD=your_secure_password\nOPENAI_API_KEY=your_openai_api_key\nLANGFLOW_SUPERUSER=admin\nLANGFLOW_SUPERUSER_PASSWORD=your_langflow_password\nLANGFLOW_SECRET_KEY=your_secret_key\n"})}),"\n",(0,l.jsxs)(n.p,{children:["For more information on configuring OpenRAG with environment variables, see ",(0,l.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Start ",(0,l.jsx)(n.code,{children:"docling serve"})," on the host machine.\nOpenRAG Docker installations require that ",(0,l.jsx)(n.code,{children:"docling serve"})," is running on port 5001 on the host machine.\nThis enables ",(0,l.jsx)(n.a,{href:"https://opensource.apple.com/projects/mlx/",children:"Mac MLX"})," support for document processing."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py start --port 5001\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Confirm ",(0,l.jsx)(n.code,{children:"docling serve"})," is running."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{children:"uv run python scripts/docling_ctl.py status\n"})}),"\n",(0,l.jsxs)(n.p,{children:["Make sure the response shows that ",(0,l.jsx)(n.code,{children:"docling serve"})," is running, for example:"]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"Status: running\nEndpoint: http://127.0.0.1:5001\nDocs: http://127.0.0.1:5001/docs\nPID: 27746\n"})}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Deploy OpenRAG locally with Docker Compose based on your deployment type."}),"\n",(0,l.jsxs)(t.A,{groupId:"Compose 
file",children:[(0,l.jsx)(i.A,{value:"docker-compose.yml",label:"docker-compose.yml",default:!0,children:(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose build\ndocker compose up -d\n"})})}),(0,l.jsx)(i.A,{value:"docker-compose-cpu.yml",label:"docker-compose-cpu.yml",children:(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose -f docker-compose-cpu.yml up -d\n"})})})]}),"\n",(0,l.jsx)(n.p,{children:"The OpenRAG Docker Compose file starts five containers:"}),"\n",(0,l.jsxs)(n.table,{children:[(0,l.jsx)(n.thead,{children:(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.th,{children:"Container Name"}),(0,l.jsx)(n.th,{children:"Default Address"}),(0,l.jsx)(n.th,{children:"Purpose"})]})}),(0,l.jsxs)(n.tbody,{children:[(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenRAG Backend"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})}),(0,l.jsx)(n.td,{children:"FastAPI server and core functionality."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenRAG Frontend"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})}),(0,l.jsx)(n.td,{children:"React web interface for users."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"Langflow"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})}),(0,l.jsx)(n.td,{children:"AI workflow engine and flow management."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenSearch"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:9200",children:"http://localhost:9200"})}),(0,l.jsx)(n.td,{children:"Vector database for document storage."})]}),(0,l.jsxs)(n.tr,{children:[(0,l.jsx)(n.td,{children:"OpenSearch Dashboards"}),(0,l.jsx)(n.td,{children:(0,l.jsx)(n.a,{href:"http://localhost:5601",children:"http://localhost:5601"})}),(0,l.jsx)(n.td,{children:"Database administration interface."})]})]})]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsx)(n.p,{children:"Verify installation by confirming all services are running."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose ps\n"})}),"\n",(0,l.jsx)(n.p,{children:"You can now access OpenRAG at the following endpoints:"}),"\n",(0,l.jsxs)(n.ul,{children:["\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.strong,{children:"Frontend"}),": ",(0,l.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.strong,{children:"Backend API"}),": ",(0,l.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})]}),"\n",(0,l.jsxs)(n.li,{children:[(0,l.jsx)(n.strong,{children:"Langflow"}),": ",(0,l.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})]}),"\n"]}),"\n"]}),"\n",(0,l.jsxs)(n.li,{children:["\n",(0,l.jsxs)(n.p,{children:["Continue with ",(0,l.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n"]}),"\n"]}),"\n",(0,l.jsxs)(n.p,{children:["To stop ",(0,l.jsx)(n.code,{children:"docling serve"})," when you're done with your OpenRAG deployment, run:"]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py stop\n"})}),"\n",(0,l.jsx)(a.Ay,{}),"\n",(0,l.jsx)(n.h2,{id:"container-management-commands",children:"Container management commands"}),"\n",(0,l.jsxs)(n.p,{children:["Manage your OpenRAG containers 
with the following commands.\nThese commands are also available in the TUI's ",(0,l.jsx)(n.a,{href:"/install#status",children:"Status menu"}),"."]}),"\n",(0,l.jsx)(n.h3,{id:"upgrade-containers",children:"Upgrade containers"}),"\n",(0,l.jsx)(n.p,{children:"Upgrade your containers to the latest version while preserving your data."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose pull\ndocker compose up -d --force-recreate\n"})}),"\n",(0,l.jsx)(n.h3,{id:"rebuild-containers-destructive",children:"Rebuild containers (destructive)"}),"\n",(0,l.jsxs)(n.p,{children:["Reset state by rebuilding all of your containers.\nYour OpenSearch and Langflow databases will be lost.\nDocuments stored in the ",(0,l.jsx)(n.code,{children:"./documents"})," directory will persist, since the directory is mounted as a volume in the OpenRAG backend container."]}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose up --build --force-recreate --remove-orphans\n"})}),"\n",(0,l.jsx)(n.h3,{id:"remove-all-containers-and-data-destructive",children:"Remove all containers and data (destructive)"}),"\n",(0,l.jsx)(n.p,{children:"Completely remove your OpenRAG installation and delete all data.\nThis deletes all of your data, including OpenSearch data, uploaded documents, and authentication."}),"\n",(0,l.jsx)(n.pre,{children:(0,l.jsx)(n.code,{className:"language-bash",children:"docker compose down --volumes --remove-orphans --rmi local\ndocker system prune -f\n"})})]})}function m(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,l.jsx)(n,{...e,children:(0,l.jsx)(x,{...e})}):x(e)}}}]); \ No newline at end of file diff --git a/assets/js/af920ffe.02db3b49.js b/assets/js/af920ffe.02db3b49.js deleted file mode 100644 index d5ca9b38..00000000 --- a/assets/js/af920ffe.02db3b49.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[98],{1260:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>h,contentTitle:()=>c,default:()=>g,frontMatter:()=>d,metadata:()=>s,toc:()=>p});const s=JSON.parse('{"id":"get-started/quickstart","title":"Quickstart","description":"Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the OpenRAG API.","source":"@site/docs/get-started/quickstart.mdx","sourceDirName":"get-started","slug":"/quickstart","permalink":"/quickstart","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/quickstart.mdx","tags":[],"version":"current","frontMatter":{"title":"Quickstart","slug":"/quickstart"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG containers","permalink":"/get-started/docker"},"next":{"title":"Langflow in OpenRAG","permalink":"/agents"}}');var r=t(4848),i=t(8453),o=t(1610),a=t(1470),l=t(9365);const d={title:"Quickstart",slug:"/quickstart"},c=void 0,h={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Load and chat with your own documents",id:"load-and-chat-with-your-own-documents",level:2},{value:"Swap out the language model to modify agent behavior",id:"change-components",level:2},{value:"Integrate OpenRAG into your application",id:"integrate-openrag-into-your-application",level:2}];function u(e){const n={a:"a",code:"code",h2:"h2",img:"img",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"Get started with OpenRAG by 
loading your knowledge, swapping out your language model, and then chatting with the OpenRAG API."}),"\n",(0,r.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["Install and start OpenRAG with the ",(0,r.jsx)(n.a,{href:"/install",children:"TUI"})," or ",(0,r.jsx)(n.a,{href:"/get-started/docker",children:"Docker"})]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"load-and-chat-with-your-own-documents",children:"Load and chat with your own documents"}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["In OpenRAG, click ",(0,r.jsx)(o.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Chat"}),".\nThe chat is powered by the OpenRAG OpenSearch Agent.\nFor more information, see ",(0,r.jsx)(n.a,{href:"/agents",children:"Langflow Agents"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Ask ",(0,r.jsx)(n.code,{children:"What documents are available to you?"}),"\nThe agent responds with a message summarizing the documents that OpenRAG loads by default.\nKnowledge is stored in OpenSearch.\nFor more information, see ",(0,r.jsx)(n.a,{href:"/knowledge",children:"Knowledge"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To confirm the agent is correct about the default knowledge, click ",(0,r.jsx)(o.A,{name:"Library","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Knowledge"}),".\nThe ",(0,r.jsx)(n.strong,{children:"Knowledge"})," page lists the documents OpenRAG has ingested into the OpenSearch vector database.\nClick on a document to display the chunks derived from splitting the default documents into the vector database."]}),"\n",(0,r.jsxs)(n.li,{children:["To add documents to your knowledge base, click ",(0,r.jsx)(o.A,{name:"Plus","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Add Knowledge"}),".","\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["Select ",(0,r.jsx)(n.strong,{children:"Add File"})," to add a single file from your local machine."]}),"\n",(0,r.jsxs)(n.li,{children:["Select ",(0,r.jsx)(n.strong,{children:"Process Folder"})," to process an entire folder of documents from your local machine."]}),"\n",(0,r.jsxs)(n.li,{children:["Select your cloud storage provider to add knowledge from an OAuth-connected storage provider. 
For more information, see ",(0,r.jsx)(n.a,{href:"/knowledge#oauth-ingestion",children:"OAuth ingestion"}),"."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["Return to the Chat window and ask a question about your loaded data.\nFor example, with a manual about a PC tablet loaded, ask ",(0,r.jsx)(n.code,{children:"How do I connect this device to WiFI?"}),"\nThe agent responds with a message indicating it now has your knowledge as context for answering questions."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(o.A,{name:"Gear","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Function Call: search_documents (tool_call)"}),".\nThis log describes how the agent uses tools.\nThis is helpful for troubleshooting when the agent isn't responding as expected."]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"change-components",children:"Swap out the language model to modify agent behavior"}),"\n",(0,r.jsxs)(n.p,{children:["To modify the knowledge ingestion or Agent behavior, click ",(0,r.jsx)(o.A,{name:"Settings2","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"In this example, you'll try a different LLM to demonstrate how the Agent's response changes."}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["To edit the Agent's behavior, click ",(0,r.jsx)(n.strong,{children:"Edit in Langflow"}),".\nYou can more quickly access the ",(0,r.jsx)(n.strong,{children:"Language Model"})," and ",(0,r.jsx)(n.strong,{children:"Agent Instructions"})," fields in this page, but for illustration purposes, navigate to the Langflow visual builder."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["OpenRAG warns you that you're entering Langflow. Click ",(0,r.jsx)(n.strong,{children:"Proceed"}),".\nThe OpenRAG OpenSearch Agent flow appears in a new browser window.\n",(0,r.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Find the ",(0,r.jsx)(n.strong,{children:"Language Model"})," component, and then change the ",(0,r.jsx)(n.strong,{children:"Model Name"})," field to a different OpenAI model."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Save your flow with ",(0,r.jsx)("kbd",{children:"Command+S"})," (Mac) or ",(0,r.jsx)("kbd",{children:"Ctrl+S"})," (Windows)."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Return to the OpenRAG browser window, and start a new conversation by clicking ",(0,r.jsx)(o.A,{name:"Plus","aria-hidden":"true"})," in the ",(0,r.jsx)(n.strong,{children:"Conversations"})," tab."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Ask the same question you asked before to see how the response differs between models."}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"integrate-openrag-into-your-application",children:"Integrate OpenRAG into your application"}),"\n",(0,r.jsxs)(n.p,{children:["Langflow in OpenRAG includes pre-built flows that you can integrate into your applications using the ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Langflow API"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"The Langflow API accepts Python, TypeScript, or curl requests to run flows and get responses. 
You can use these flows as-is or modify them to better suit your needs."}),"\n",(0,r.jsx)(n.p,{children:"In this section, you'll run the OpenRAG OpenSearch Agent flow and get a response using the API."}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["To navigate to the OpenRAG OpenSearch Agent flow in Langflow, click ",(0,r.jsx)(o.A,{name:"Settings2","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Settings"}),", and then click ",(0,r.jsx)(n.strong,{children:"Edit in Langflow"})," in the OpenRAG OpenSearch Agent flow."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Create a ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication",children:"Langflow API key"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["A Langflow API key is a user-specific token you can use with Langflow.\nIt is ",(0,r.jsx)(n.strong,{children:"only"})," used for sending requests to the Langflow server.\nIt does ",(0,r.jsx)(n.strong,{children:"not"})," access OpenRAG."]}),"\n",(0,r.jsx)(n.p,{children:"To create a Langflow API key, do the following:"}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Open Langflow, click your user icon, and then select ",(0,r.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Langflow API Keys"}),", and then click ",(0,r.jsx)(o.A,{name:"Plus","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Add New"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Name your key, and then click ",(0,r.jsx)(n.strong,{children:"Create API Key"}),"."]}),"\n",(0,r.jsx)(n.li,{children:"Copy the API key and store it securely."}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Langflow includes code snippets for the request to the Langflow API.\nTo retrieve the code snippet, click ",(0,r.jsx)(n.strong,{children:"Share"}),", and then click ",(0,r.jsx)(n.strong,{children:"API access"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["The default code in the API access pane constructs a request with the Langflow server ",(0,r.jsx)(n.code,{children:"url"}),", ",(0,r.jsx)(n.code,{children:"headers"}),", and a ",(0,r.jsx)(n.code,{children:"payload"})," of request data. 
The code snippets automatically include the ",(0,r.jsx)(n.code,{children:"LANGFLOW_SERVER_ADDRESS"})," and ",(0,r.jsx)(n.code,{children:"FLOW_ID"})," values for the flow."]}),"\n",(0,r.jsxs)(a.A,{children:[(0,r.jsx)(l.A,{value:"python",label:"Python",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-python",children:'import requests\nimport os\nimport uuid\n\napi_key = \'LANGFLOW_API_KEY\'\nurl = "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" # The complete API endpoint URL for this flow\n\n# Request payload configuration\npayload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n}\npayload["session_id"] = str(uuid.uuid4())\n\nheaders = {"x-api-key": api_key}\n\ntry:\n # Send API request\n response = requests.request("POST", url, json=payload, headers=headers)\n response.raise_for_status() # Raise exception for bad status codes\n\n # Print response\n print(response.text)\n\nexcept requests.exceptions.RequestException as e:\n print(f"Error making API request: {e}")\nexcept ValueError as e:\n print(f"Error parsing response: {e}")\n'})})}),(0,r.jsx)(l.A,{value:"typescript",label:"TypeScript",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-typescript",children:'const crypto = require(\'crypto\');\nconst apiKey = \'LANGFLOW_API_KEY\';\nconst payload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n};\npayload.session_id = crypto.randomUUID();\n\nconst options = {\n method: \'POST\',\n headers: {\n \'Content-Type\': \'application/json\',\n "x-api-key": apiKey\n },\n body: JSON.stringify(payload)\n};\n\nfetch(\'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID\', options)\n .then(response => response.json())\n .then(response => console.warn(response))\n .catch(err => console.error(err));\n'})})}),(0,r.jsx)(l.A,{value:"curl",label:"curl",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:'curl --request POST \\\n --url \'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID?stream=false\' \\\n --header \'Content-Type: application/json\' \\\n --header "x-api-key: LANGFLOW_API_KEY" \\\n --data \'{\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n }\'\n'})})})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Copy the snippet, paste it in a script file, and then run the script to send the request. 
If you are using the curl snippet, you can run the command directly in your terminal."}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.p,{children:"If the request is successful, the response includes many details about the flow run, including the session ID, inputs, outputs, components, durations, and more."}),"\n",(0,r.jsx)(n.p,{children:"To further explore the API, see:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["The Langflow ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/quickstart#extract-data-from-the-response",children:"Quickstart"})," extends this example with extracting fields from the response."]}),"\n",(0,r.jsx)(n.li,{children:(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Get started with the Langflow API"})}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(u,{...e})}):u(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>s});const s=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"}}]); \ No newline at end of file diff --git a/assets/js/af920ffe.059a83cf.js b/assets/js/af920ffe.059a83cf.js new file mode 100644 index 00000000..14038f05 --- /dev/null +++ b/assets/js/af920ffe.059a83cf.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[98],{1260:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>h,contentTitle:()=>c,default:()=>g,frontMatter:()=>d,metadata:()=>s,toc:()=>p});const s=JSON.parse('{"id":"get-started/quickstart","title":"Quickstart","description":"Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the Langflow API.","source":"@site/docs/get-started/quickstart.mdx","sourceDirName":"get-started","slug":"/quickstart","permalink":"/quickstart","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/quickstart.mdx","tags":[],"version":"current","frontMatter":{"title":"Quickstart","slug":"/quickstart"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG containers","permalink":"/get-started/docker"},"next":{"title":"Langflow in OpenRAG","permalink":"/agents"}}');var r=t(4848),i=t(8453),o=t(1610),a=t(1470),l=t(9365);const d={title:"Quickstart",slug:"/quickstart"},c=void 0,h={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Load and chat with your own documents",id:"load-and-chat-with-your-own-documents",level:2},{value:"Swap out the language model to modify agent behavior",id:"change-components",level:2},{value:"Integrate OpenRAG into your application",id:"integrate-openrag-into-your-application",level:2}];function u(e){const n={a:"a",code:"code",h2:"h2",img:"img",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the Langflow API."}),"\n",(0,r.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["Install and start OpenRAG with the ",(0,r.jsx)(n.a,{href:"/install",children:"TUI"})," or ",(0,r.jsx)(n.a,{href:"/get-started/docker",children:"Docker"})]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"load-and-chat-with-your-own-documents",children:"Load and chat with your own documents"}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["In OpenRAG, click 
",(0,r.jsx)(o.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Chat"}),".\nThe chat is powered by the OpenRAG OpenSearch Agent.\nFor more information, see ",(0,r.jsx)(n.a,{href:"/agents",children:"Langflow in OpenRAG"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Ask ",(0,r.jsx)(n.code,{children:"What documents are available to you?"}),"\nThe agent responds with a message summarizing the documents that OpenRAG loads by default.\nKnowledge is stored in OpenSearch.\nFor more information, see ",(0,r.jsx)(n.a,{href:"/knowledge",children:"OpenSearch in OpenRAG"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To confirm the agent is correct about the default knowledge, click ",(0,r.jsx)(o.A,{name:"Library","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Knowledge"}),".\nThe ",(0,r.jsx)(n.strong,{children:"Knowledge"})," page lists the documents OpenRAG has ingested into the OpenSearch vector database.\nClick on a document to display the chunks derived from splitting the default documents into the OpenSearch vector database."]}),"\n",(0,r.jsxs)(n.li,{children:["To add documents to your knowledge base, click ",(0,r.jsx)(n.strong,{children:"Add Knowledge"}),".","\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["Select ",(0,r.jsx)(o.A,{name:"File","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"File"})," to add a single file from your local machine."]}),"\n",(0,r.jsxs)(n.li,{children:["Select ",(0,r.jsx)(o.A,{name:"Folder","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Folder"})," to process an entire folder of documents from your local machine. The default directory is ",(0,r.jsx)(n.code,{children:"/documents"})," in your OpenRAG directory."]}),"\n",(0,r.jsxs)(n.li,{children:["Select your cloud storage provider to add knowledge from an OAuth-connected storage provider. 
For more information, see ",(0,r.jsx)(n.a,{href:"/knowledge#oauth-ingestion",children:"OAuth ingestion"}),"."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["Return to the Chat window and ask a question about your loaded data.\nFor example, with a manual about a PC tablet loaded, ask ",(0,r.jsx)(n.code,{children:"How do I connect this device to WiFi?"}),"\nThe agent responds with a message indicating it now has your knowledge as context for answering questions."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(o.A,{name:"Gear","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Function Call: search_documents (tool_call)"}),".\nThis log describes how the agent uses tools.\nThis is helpful for troubleshooting when the agent isn't responding as expected."]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"change-components",children:"Swap out the language model to modify agent behavior"}),"\n",(0,r.jsxs)(n.p,{children:["To modify the knowledge ingestion or Agent behavior, click ",(0,r.jsx)(o.A,{name:"Settings2","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"In this example, you'll try a different LLM to demonstrate how the Agent's response changes."}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["To edit the Agent's behavior, click ",(0,r.jsx)(n.strong,{children:"Edit in Langflow"}),".\nYou can more quickly access the ",(0,r.jsx)(n.strong,{children:"Language Model"})," and ",(0,r.jsx)(n.strong,{children:"Agent Instructions"})," fields in this page, but for illustration purposes, navigate to the Langflow visual builder.\nTo revert the flow to its initial state, click ",(0,r.jsx)(n.strong,{children:"Restore flow"}),"."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["OpenRAG warns you that you're entering Langflow. 
Click ",(0,r.jsx)(n.strong,{children:"Proceed"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["If Langflow requests login information, enter the ",(0,r.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})," and ",(0,r.jsx)(n.code,{children:"LANGFLOW_SUPERUSER_PASSWORD"})," from the ",(0,r.jsx)(n.code,{children:".env"})," file in your OpenRAG directory."]}),"\n",(0,r.jsxs)(n.p,{children:["The OpenRAG OpenSearch Agent flow appears in a new browser window.\n",(0,r.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Find the ",(0,r.jsx)(n.strong,{children:"Language Model"})," component, and then change the ",(0,r.jsx)(n.strong,{children:"Model Name"})," field to a different OpenAI model."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Save your flow with ",(0,r.jsx)("kbd",{children:"Command+S"})," (Mac) or ",(0,r.jsx)("kbd",{children:"Ctrl+S"})," (Windows)."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Return to the OpenRAG browser window, and start a new conversation by clicking ",(0,r.jsx)(o.A,{name:"Plus","aria-hidden":"true"})," in the ",(0,r.jsx)(n.strong,{children:"Conversations"})," tab."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Ask the same question you asked before to see how the response differs between models."}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"integrate-openrag-into-your-application",children:"Integrate OpenRAG into your application"}),"\n",(0,r.jsxs)(n.p,{children:["Langflow in OpenRAG includes pre-built flows that you can integrate into your applications using the ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Langflow API"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"The Langflow API accepts Python, TypeScript, or curl requests to run flows and get responses. 
You can use these flows as-is or modify them to better suit your needs."}),"\n",(0,r.jsx)(n.p,{children:"In this section, you'll run the OpenRAG OpenSearch Agent flow and get a response using the API."}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["To navigate to the OpenRAG OpenSearch Agent flow in Langflow, click ",(0,r.jsx)(o.A,{name:"Settings2","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Settings"}),", and then click ",(0,r.jsx)(n.strong,{children:"Edit in Langflow"})," in the OpenRAG OpenSearch Agent flow."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Create a ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication",children:"Langflow API key"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["A Langflow API key is a user-specific token you can use with Langflow.\nIt is ",(0,r.jsx)(n.strong,{children:"only"})," used for sending requests to the Langflow server.\nIt does ",(0,r.jsx)(n.strong,{children:"not"})," access OpenRAG."]}),"\n",(0,r.jsx)(n.p,{children:"To create a Langflow API key, do the following:"}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Open Langflow, click your user icon, and then select ",(0,r.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Langflow API Keys"}),", and then click ",(0,r.jsx)(o.A,{name:"Plus","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Add New"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Name your key, and then click ",(0,r.jsx)(n.strong,{children:"Create API Key"}),"."]}),"\n",(0,r.jsx)(n.li,{children:"Copy the API key and store it securely."}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Langflow includes code snippets for the request to the Langflow API.\nTo retrieve the code snippet, click ",(0,r.jsx)(n.strong,{children:"Share"}),", and then click ",(0,r.jsx)(n.strong,{children:"API access"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["The default code in the API access pane constructs a request with the Langflow server ",(0,r.jsx)(n.code,{children:"url"}),", ",(0,r.jsx)(n.code,{children:"headers"}),", and a ",(0,r.jsx)(n.code,{children:"payload"})," of request data. 
The code snippets automatically include the ",(0,r.jsx)(n.code,{children:"LANGFLOW_SERVER_ADDRESS"})," and ",(0,r.jsx)(n.code,{children:"FLOW_ID"})," values for the flow."]}),"\n",(0,r.jsxs)(a.A,{children:[(0,r.jsx)(l.A,{value:"python",label:"Python",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-python",children:'import requests\nimport os\nimport uuid\n\napi_key = \'LANGFLOW_API_KEY\'\nurl = "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" # The complete API endpoint URL for this flow\n\n# Request payload configuration\npayload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n}\npayload["session_id"] = str(uuid.uuid4())\n\nheaders = {"x-api-key": api_key}\n\ntry:\n # Send API request\n response = requests.request("POST", url, json=payload, headers=headers)\n response.raise_for_status() # Raise exception for bad status codes\n\n # Print response\n print(response.text)\n\nexcept requests.exceptions.RequestException as e:\n print(f"Error making API request: {e}")\nexcept ValueError as e:\n print(f"Error parsing response: {e}")\n'})})}),(0,r.jsx)(l.A,{value:"typescript",label:"TypeScript",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-typescript",children:'const crypto = require(\'crypto\');\nconst apiKey = \'LANGFLOW_API_KEY\';\nconst payload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n};\npayload.session_id = crypto.randomUUID();\n\nconst options = {\n method: \'POST\',\n headers: {\n \'Content-Type\': \'application/json\',\n "x-api-key": apiKey\n },\n body: JSON.stringify(payload)\n};\n\nfetch(\'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID\', options)\n .then(response => response.json())\n .then(response => console.warn(response))\n .catch(err => console.error(err));\n'})})}),(0,r.jsx)(l.A,{value:"curl",label:"curl",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:'curl --request POST \\\n --url \'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID?stream=false\' \\\n --header \'Content-Type: application/json\' \\\n --header "x-api-key: LANGFLOW_API_KEY" \\\n --data \'{\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n }\'\n'})})})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Copy the snippet, paste it in a script file, and then run the script to send the request. 
If you are using the curl snippet, you can run the command directly in your terminal."}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.p,{children:"If the request is successful, the response includes many details about the flow run, including the session ID, inputs, outputs, components, durations, and more."}),"\n",(0,r.jsx)(n.p,{children:"To further explore the API, see:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["The Langflow ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/quickstart#extract-data-from-the-response",children:"Quickstart"})," extends this example with extracting fields from the response."]}),"\n",(0,r.jsx)(n.li,{children:(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Get started with the Langflow API"})}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(u,{...e})}):u(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>s});const s=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"}}]); \ No newline at end of file diff --git a/assets/js/ca2c3c0c.4d983365.js b/assets/js/ca2c3c0c.4d983365.js new file mode 100644 index 00000000..5d029716 --- /dev/null +++ b/assets/js/ca2c3c0c.4d983365.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[6919],{3782:(e,n,s)=>{s.d(n,{Ay:()=>l,RM:()=>r});var o=s(4848),t=s(8453),i=s(1610);const r=[];function c(e){const n={a:"a",p:"p",strong:"strong",...(0,t.R)(),...e.components};return(0,o.jsxs)(n.p,{children:["All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.\nTo modify a flow, click ",(0,o.jsx)(i.A,{name:"Settings2","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Settings"}),", and click ",(0,o.jsx)(n.strong,{children:"Edit in Langflow"}),".\nOpenRAG's visual editor is based on the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"}),", so you can edit your flows to match your specific use case."]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(c,{...e})}):c(e)}},7125:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>a,contentTitle:()=>l,default:()=>g,frontMatter:()=>c,metadata:()=>o,toc:()=>d});const o=JSON.parse('{"id":"core-components/ingestion","title":"Docling in OpenRAG","description":"OpenRAG uses Docling for document ingestion.","source":"@site/docs/core-components/ingestion.mdx","sourceDirName":"core-components","slug":"/ingestion","permalink":"/ingestion","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/ingestion.mdx","tags":[],"version":"current","frontMatter":{"title":"Docling in OpenRAG","slug":"/ingestion"},"sidebar":"tutorialSidebar","previous":{"title":"OpenSearch in OpenRAG","permalink":"/knowledge"},"next":{"title":"Environment variables","permalink":"/reference/configuration"}}');var t=s(4848),i=s(8453),r=(s(1610),s(1470),s(9365),s(3782));const c={title:"Docling in OpenRAG",slug:"/ingestion"},l=void 0,a={},d=[{value:"Knowledge ingestion settings",id:"knowledge-ingestion-settings",level:2},{value:"Knowledge ingestion flows",id:"knowledge-ingestion-flows",level:2},...r.RM,{value:"OpenSearch URL Ingestion flow",id:"url-flow",level:3},{value:"Use OpenRAG default ingestion instead of Docling serve",id:"use-openrag-default-ingestion-instead-of-docling-serve",level:2}];function h(e){const 
n={a:"a",br:"br",code:"code",h2:"h2",h3:"h3",li:"li",p:"p",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsxs)(n.p,{children:["OpenRAG uses ",(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/",children:"Docling"})," for document ingestion.\nMore specifically, OpenRAG uses ",(0,t.jsx)(n.a,{href:"https://github.com/docling-project/docling-serve",children:"Docling Serve"}),", which starts a ",(0,t.jsx)(n.code,{children:"docling serve"})," process on your local machine and runs Docling ingestion through an API service."]}),"\n",(0,t.jsxs)(n.p,{children:["Docling ingests documents from your local machine or OAuth connectors, splits them into chunks, and stores them as separate, structured documents in the OpenSearch ",(0,t.jsx)(n.code,{children:"documents"})," index."]}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG chose Docling for its support for a wide variety of file formats, high performance, and advanced understanding of tables and images."}),"\n",(0,t.jsxs)(n.p,{children:['To modify OpenRAG\'s ingestion settings, including the Docling settings and ingestion flows, click 2" aria-hidden="true"/> ',(0,t.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"knowledge-ingestion-settings",children:"Knowledge ingestion settings"}),"\n",(0,t.jsx)(n.p,{children:"These settings configure the Docling ingestion parameters."}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG will warn you if ",(0,t.jsx)(n.code,{children:"docling serve"})," is not running.\nTo start or stop ",(0,t.jsx)(n.code,{children:"docling serve"})," or any other native services, in the TUI main menu, click ",(0,t.jsx)(n.strong,{children:"Start Native Services"})," or ",(0,t.jsx)(n.strong,{children:"Stop Native Services"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Embedding model"})," determines which AI model is used to create vector embeddings. The default is the OpenAI ",(0,t.jsx)(n.code,{children:"text-embedding-3-small"})," model."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Chunk size"})," determines how large each text chunk is in number of characters.\nLarger chunks yield more context per chunk, but may include irrelevant information. Smaller chunks yield more precise semantic search, but may lack context.\nThe default value of ",(0,t.jsx)(n.code,{children:"1000"})," characters provides a good starting point that balances these considerations."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Chunk overlap"})," controls the number of characters that overlap over chunk boundaries.\nUse larger overlap values for documents where context is most important, and use smaller overlap values for simpler documents, or when optimization is most important.\nThe default value of 200 characters of overlap with a chunk size of 1000 (20% overlap) is suitable for general use cases. Decrease the overlap to 10% for a more efficient pipeline, or increase to 40% for more complex documents."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Table Structure"})," enables Docling's ",(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,t.jsx)(n.code,{children:"DocumentConverter"})})," tool for parsing tables. Instead of treating tables as plain text, tables are output as structured table data with preserved relationships and metadata. 
",(0,t.jsx)(n.strong,{children:"Table Structure"})," is enabled by default."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"OCR"})," enables or disabled OCR processing when extracting text from images and scanned documents.\nOCR is disabled by default. This setting is best suited for processing text-based documents as quickly as possible with Docling's ",(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,t.jsx)(n.code,{children:"DocumentConverter"})}),". Images are ignored and not processed."]}),"\n",(0,t.jsx)(n.p,{children:"Enable OCR when you are processing documents containing images with text that requires extraction, or for scanned documents. Enabling OCR can slow ingestion performance."}),"\n",(0,t.jsxs)(n.p,{children:["If OpenRAG detects that the local machine is running on macOS, OpenRAG uses the ",(0,t.jsx)(n.a,{href:"https://www.piwheels.org/project/ocrmac/",children:"ocrmac"})," OCR engine. Other platforms use ",(0,t.jsx)(n.a,{href:"https://www.jaided.ai/easyocr/",children:"easyocr"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Picture descriptions"})," adds image descriptions generated by the ",(0,t.jsx)(n.a,{href:"https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct",children:"SmolVLM-256M-Instruct"})," model to OCR processing. Enabling picture descriptions can slow ingestion performance."]}),"\n",(0,t.jsx)(n.h2,{id:"knowledge-ingestion-flows",children:"Knowledge ingestion flows"}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Flows"})," in Langflow are functional representations of application workflows, with multiple ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/concepts-components",children:"component"})," nodes connected as single steps in a workflow."]}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow is the default knowledge ingestion flow in OpenRAG: when you ",(0,t.jsx)(n.strong,{children:"Add Knowledge"})," in OpenRAG, you run the OpenSearch Ingestion flow in the background. The flow ingests documents using ",(0,t.jsx)(n.strong,{children:"Docling Serve"})," to import and process documents."]}),"\n",(0,t.jsx)(n.p,{children:"This flow contains ten components connected together to process and store documents in your knowledge base."}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-docling",children:[(0,t.jsx)(n.strong,{children:"Docling Serve"})," component"]})," processes input documents by connecting to your instance of Docling Serve."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-docling",children:[(0,t.jsx)(n.strong,{children:"Export DoclingDocument"})," component"]})," exports the processed DoclingDocument to markdown format with image export mode set to placeholder. 
This conversion makes the structured document data into a standardized format for further processing."]}),"\n",(0,t.jsxs)(n.li,{children:["Three ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#dataframe-operations",children:[(0,t.jsx)(n.strong,{children:"DataFrame Operations"})," components"]})," sequentially add metadata columns to the document data of ",(0,t.jsx)(n.code,{children:"filename"}),", ",(0,t.jsx)(n.code,{children:"file_size"}),", and ",(0,t.jsx)(n.code,{children:"mimetype"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#split-text",children:[(0,t.jsx)(n.strong,{children:"Split Text"})," component"]})," splits the processed text into chunks with a chunk size of 1000 characters and an overlap of 200 characters."]}),"\n",(0,t.jsxs)(n.li,{children:["Four ",(0,t.jsx)(n.strong,{children:"Secret Input"})," components provide secure access to configuration variables: ",(0,t.jsx)(n.code,{children:"CONNECTOR_TYPE"}),", ",(0,t.jsx)(n.code,{children:"OWNER"}),", ",(0,t.jsx)(n.code,{children:"OWNER_EMAIL"}),", and ",(0,t.jsx)(n.code,{children:"OWNER_NAME"}),". These are runtime variables populated from OAuth login."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsx)(n.strong,{children:"Create Data"})," component combines the secret inputs into a structured data object that will be associated with the document embeddings."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,t.jsx)(n.strong,{children:"Embedding Model"})," component"]})," generates vector embeddings using OpenAI's ",(0,t.jsx)(n.code,{children:"text-embedding-3-small"})," model. The embedding model is selected at [Application onboarding] and cannot be changed."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,t.jsx)(n.strong,{children:"OpenSearch"})," component"]})," stores the processed documents and their embeddings in the ",(0,t.jsx)(n.code,{children:"documents"})," index at ",(0,t.jsx)(n.code,{children:"https://opensearch:9200"}),". 
By default, the component is authenticated with a JWT token, but you can also select ",(0,t.jsx)(n.code,{children:"basic"})," auth mode, and enter your OpenSearch admin username and password."]}),"\n"]}),"\n",(0,t.jsx)(r.Ay,{}),"\n",(0,t.jsx)(n.h3,{id:"url-flow",children:"OpenSearch URL Ingestion flow"}),"\n",(0,t.jsxs)(n.p,{children:["An additional knowledge ingestion flow is included in OpenRAG, where it is used as an MCP tool by the ",(0,t.jsx)(n.a,{href:"/agents#flow",children:(0,t.jsx)(n.strong,{children:"Open Search Agent flow"})}),".\nThe agent calls this component to fetch web content, and the results are ingested into OpenSearch."]}),"\n",(0,t.jsxs)(n.p,{children:["For more on using MCP clients in Langflow, see ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-client",children:"MCP clients"}),".",(0,t.jsx)(n.br,{}),"\n","To connect additional MCP servers to the MCP client, see ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-tutorial",children:"Connect to MCP servers from your application"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"use-openrag-default-ingestion-instead-of-docling-serve",children:"Use OpenRAG default ingestion instead of Docling serve"}),"\n",(0,t.jsxs)(n.p,{children:["If you want to use OpenRAG's built-in pipeline instead of Docling serve, set ",(0,t.jsx)(n.code,{children:"DISABLE_INGEST_WITH_LANGFLOW=true"})," in ",(0,t.jsx)(n.a,{href:"/reference/configuration#document-processing",children:"Environment variables"}),"."]}),"\n",(0,t.jsx)(n.p,{children:"The built-in pipeline still uses the Docling processor, but uses it directly without the Docling Serve API."}),"\n",(0,t.jsxs)(n.p,{children:["For more information, see ",(0,t.jsxs)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58",children:[(0,t.jsx)(n.code,{children:"processors.py"})," in the OpenRAG repository"]}),"."]})]})}function g(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(h,{...e})}):h(e)}}}]); \ No newline at end of file diff --git a/assets/js/ca2c3c0c.71b9827a.js b/assets/js/ca2c3c0c.71b9827a.js deleted file mode 100644 index db841a3d..00000000 --- a/assets/js/ca2c3c0c.71b9827a.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[6919],{3782:(e,n,s)=>{s.d(n,{Ay:()=>l,RM:()=>r});var o=s(4848),t=s(8453),i=s(1610);const r=[];function c(e){const n={a:"a",p:"p",strong:"strong",...(0,t.R)(),...e.components};return(0,o.jsxs)(n.p,{children:["All flows included with OpenRAG are designed to be modular, performant, and provider-agnostic.\nTo modify a flow, click ",(0,o.jsx)(i.A,{name:"Settings2","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Settings"}),", and click ",(0,o.jsx)(n.strong,{children:"Edit in Langflow"}),".\nOpenRAG's visual editor is based on the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Langflow visual editor"}),", so you can edit your flows to match your specific use case."]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(c,{...e})}):c(e)}},7125:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>a,contentTitle:()=>l,default:()=>p,frontMatter:()=>c,metadata:()=>o,toc:()=>d});const o=JSON.parse('{"id":"core-components/ingestion","title":"Docling in OpenRAG","description":"OpenRAG uses Docling for its document ingestion 
pipeline.","source":"@site/docs/core-components/ingestion.mdx","sourceDirName":"core-components","slug":"/ingestion","permalink":"/ingestion","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/ingestion.mdx","tags":[],"version":"current","frontMatter":{"title":"Docling in OpenRAG","slug":"/ingestion"},"sidebar":"tutorialSidebar","previous":{"title":"OpenSearch in OpenRAG","permalink":"/knowledge"},"next":{"title":"Environment variables","permalink":"/reference/configuration"}}');var t=s(4848),i=s(8453),r=(s(1610),s(1470),s(9365),s(3782));const c={title:"Docling in OpenRAG",slug:"/ingestion"},l=void 0,a={},d=[{value:"Docling ingestion settings",id:"docling-ingestion-settings",level:2},{value:"Use OpenRAG default ingestion instead of Docling serve",id:"use-openrag-default-ingestion-instead-of-docling-serve",level:2},{value:"Knowledge ingestion flows",id:"knowledge-ingestion-flows",level:2},...r.RM,{value:"OpenSearch URL Ingestion flow",id:"url-flow",level:3}];function h(e){const n={a:"a",br:"br",code:"code",h2:"h2",h3:"h3",li:"li",p:"p",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsxs)(n.p,{children:["OpenRAG uses ",(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/",children:"Docling"})," for its document ingestion pipeline.\nMore specifically, OpenRAG uses ",(0,t.jsx)(n.a,{href:"https://github.com/docling-project/docling-serve",children:"Docling Serve"}),", which starts a ",(0,t.jsx)(n.code,{children:"docling serve"})," process on your local machine and runs Docling ingestion through an API service."]}),"\n",(0,t.jsxs)(n.p,{children:["Docling ingests documents from your local machine or OAuth connectors, splits them into chunks, and stores them as separate, structured documents in the OpenSearch ",(0,t.jsx)(n.code,{children:"documents"})," index."]}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG chose Docling for its support for a wide variety of file formats, high performance, and advanced understanding of tables and images."}),"\n",(0,t.jsx)(n.h2,{id:"docling-ingestion-settings",children:"Docling ingestion settings"}),"\n",(0,t.jsx)(n.p,{children:"These settings configure the Docling ingestion parameters."}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG will warn you if ",(0,t.jsx)(n.code,{children:"docling serve"})," is not running.\nTo start or stop ",(0,t.jsx)(n.code,{children:"docling serve"})," or any other native services, in the TUI main menu, click ",(0,t.jsx)(n.strong,{children:"Start Native Services"})," or ",(0,t.jsx)(n.strong,{children:"Stop Native Services"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Embedding model"})," determines which AI model is used to create vector embeddings. The default is ",(0,t.jsx)(n.code,{children:"text-embedding-3-small"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Chunk size"})," determines how large each text chunk is in number of characters.\nLarger chunks yield more context per chunk, but may include irrelevant information. 
Smaller chunks yield more precise semantic search, but may lack context.\nThe default value of ",(0,t.jsx)(n.code,{children:"1000"})," characters provides a good starting point that balances these considerations."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Chunk overlap"})," controls the number of characters that overlap over chunk boundaries.\nUse larger overlap values for documents where context is most important, and use smaller overlap values for simpler documents, or when optimization is most important.\nThe default value of 200 characters of overlap with a chunk size of 1000 (20% overlap) is suitable for general use cases. Decrease the overlap to 10% for a more efficient pipeline, or increase to 40% for more complex documents."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"OCR"})," enables or disabled OCR processing when extracting text from images and scanned documents.\nOCR is disabled by default. This setting is best suited for processing text-based documents as quickly as possible with Docling's ",(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,t.jsx)(n.code,{children:"DocumentConverter"})}),". Images are ignored and not processed."]}),"\n",(0,t.jsx)(n.p,{children:"Enable OCR when you are processing documents containing images with text that requires extraction, or for scanned documents. Enabling OCR can slow ingestion performance."}),"\n",(0,t.jsxs)(n.p,{children:["If OpenRAG detects that the local machine is running on macOS, OpenRAG uses the ",(0,t.jsx)(n.a,{href:"https://www.piwheels.org/project/ocrmac/",children:"ocrmac"})," OCR engine. Other platforms use ",(0,t.jsx)(n.a,{href:"https://www.jaided.ai/easyocr/",children:"easyocr"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Picture descriptions"})," adds image descriptions generated by the ",(0,t.jsx)(n.a,{href:"https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct",children:"SmolVLM-256M-Instruct"})," model to OCR processing. 
Enabling picture descriptions can slow ingestion performance."]}),"\n",(0,t.jsx)(n.h2,{id:"use-openrag-default-ingestion-instead-of-docling-serve",children:"Use OpenRAG default ingestion instead of Docling serve"}),"\n",(0,t.jsxs)(n.p,{children:["If you want to use OpenRAG's built-in pipeline instead of Docling serve, set ",(0,t.jsx)(n.code,{children:"DISABLE_INGEST_WITH_LANGFLOW=true"})," in ",(0,t.jsx)(n.a,{href:"/reference/configuration#document-processing",children:"Environment variables"}),"."]}),"\n",(0,t.jsx)(n.p,{children:"The built-in pipeline still uses the Docling processor, but uses it directly without the Docling Serve API."}),"\n",(0,t.jsxs)(n.p,{children:["For more information, see ",(0,t.jsxs)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58",children:[(0,t.jsx)(n.code,{children:"processors.py"})," in the OpenRAG repository"]}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"knowledge-ingestion-flows",children:"Knowledge ingestion flows"}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docs.langflow.org/concepts-overview",children:"Flows"})," in Langflow are functional representations of application workflows, with multiple ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/concepts-components",children:"component"})," nodes connected as single steps in a workflow."]}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow is the default knowledge ingestion flow in OpenRAG: when you ",(0,t.jsx)(n.strong,{children:"Add Knowledge"})," in OpenRAG, you run the OpenSearch Ingestion flow in the background. The flow ingests documents using ",(0,t.jsx)(n.strong,{children:"Docling Serve"})," to import and process documents."]}),"\n",(0,t.jsx)(n.p,{children:"This flow contains ten components connected together to process and store documents in your knowledge base."}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-docling",children:[(0,t.jsx)(n.strong,{children:"Docling Serve"})," component"]})," processes input documents by connecting to your instance of Docling Serve."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-docling",children:[(0,t.jsx)(n.strong,{children:"Export DoclingDocument"})," component"]})," exports the processed DoclingDocument to markdown format with image export mode set to placeholder. 
This conversion makes the structured document data into a standardized format for further processing."]}),"\n",(0,t.jsxs)(n.li,{children:["Three ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#dataframe-operations",children:[(0,t.jsx)(n.strong,{children:"DataFrame Operations"})," components"]})," sequentially add metadata columns to the document data of ",(0,t.jsx)(n.code,{children:"filename"}),", ",(0,t.jsx)(n.code,{children:"file_size"}),", and ",(0,t.jsx)(n.code,{children:"mimetype"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#split-text",children:[(0,t.jsx)(n.strong,{children:"Split Text"})," component"]})," splits the processed text into chunks with a chunk size of 1000 characters and an overlap of 200 characters."]}),"\n",(0,t.jsxs)(n.li,{children:["Four ",(0,t.jsx)(n.strong,{children:"Secret Input"})," components provide secure access to configuration variables: ",(0,t.jsx)(n.code,{children:"CONNECTOR_TYPE"}),", ",(0,t.jsx)(n.code,{children:"OWNER"}),", ",(0,t.jsx)(n.code,{children:"OWNER_EMAIL"}),", and ",(0,t.jsx)(n.code,{children:"OWNER_NAME"}),". These are runtime variables populated from OAuth login."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsx)(n.strong,{children:"Create Data"})," component combines the secret inputs into a structured data object that will be associated with the document embeddings."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,t.jsx)(n.strong,{children:"Embedding Model"})," component"]})," generates vector embeddings using OpenAI's ",(0,t.jsx)(n.code,{children:"text-embedding-3-small"})," model. The embedding model is selected at [Application onboarding] and cannot be changed."]}),"\n",(0,t.jsxs)(n.li,{children:["The ",(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,t.jsx)(n.strong,{children:"OpenSearch"})," component"]})," stores the processed documents and their embeddings in the ",(0,t.jsx)(n.code,{children:"documents"})," index at ",(0,t.jsx)(n.code,{children:"https://opensearch:9200"}),". 
By default, the component is authenticated with a JWT token, but you can also select ",(0,t.jsx)(n.code,{children:"basic"})," auth mode, and enter your OpenSearch admin username and password."]}),"\n"]}),"\n",(0,t.jsx)(r.Ay,{}),"\n",(0,t.jsx)(n.h3,{id:"url-flow",children:"OpenSearch URL Ingestion flow"}),"\n",(0,t.jsxs)(n.p,{children:["An additional knowledge ingestion flow is included in OpenRAG, where it is used as an MCP tool by the ",(0,t.jsx)(n.a,{href:"/agents#flow",children:(0,t.jsx)(n.strong,{children:"Open Search Agent flow"})}),".\nThe agent calls this component to fetch web content, and the results are ingested into OpenSearch."]}),"\n",(0,t.jsxs)(n.p,{children:["For more on using MCP clients in Langflow, see ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-client",children:"MCP clients"}),".",(0,t.jsx)(n.br,{}),"\n","To connect additional MCP servers to the MCP client, see ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-tutorial",children:"Connect to MCP servers from your application"}),"."]})]})}function p(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(h,{...e})}):h(e)}}}]); \ No newline at end of file diff --git a/assets/js/d0314b07.068d04d8.js b/assets/js/d0314b07.068d04d8.js deleted file mode 100644 index b743c880..00000000 --- a/assets/js/d0314b07.068d04d8.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[5750],{887:(e,n,s)=>{s.d(n,{Ay:()=>c,RM:()=>l});var r=s(4848),i=s(8453),t=s(1470),o=s(9365);const l=[{value:"Application onboarding",id:"application-onboarding",level:2}];function a(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,r.jsxs)(n.p,{children:["The first time you start OpenRAG, whether using the TUI or a ",(0,r.jsx)(n.code,{children:".env"})," file, you must complete application onboarding."]}),"\n",(0,r.jsxs)(n.p,{children:["Values from onboarding can be changed later in the OpenRAG ",(0,r.jsx)(n.strong,{children:"Settings"})," page."]}),"\n",(0,r.jsx)(n.p,{children:"Choose one LLM provider and complete only those steps:"}),"\n",(0,r.jsxs)(t.A,{groupId:"Provider",children:[(0,r.jsx)(o.A,{value:"OpenAI",label:"OpenAI",default:!0,children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsx)(o.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Complete the fields for ",(0,r.jsx)(n.strong,{children:"watsonx.ai API 
Endpoint"}),", ",(0,r.jsx)(n.strong,{children:"IBM API key"}),", and ",(0,r.jsx)(n.strong,{children:"IBM Project ID"}),".\nThese values are found in your IBM watsonx deployment."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsxs)(o.A,{value:"Ollama",label:"Ollama",children:[(0,r.jsx)(n.admonition,{type:"tip",children:(0,r.jsxs)(n.p,{children:["Ollama is not included with OpenRAG. To install Ollama, see the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG automatically transforms ",(0,r.jsx)(n.code,{children:"localhost"})," to access services outside of the container, and sends a test connection to your Ollama server to confirm connectivity."]}),"\n",(0,r.jsxs)(n.li,{children:["Select the ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.\nOpenRAG retrieves the available models from your Ollama server."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function c(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(a,{...e})}):a(e)}},4398:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>h,contentTitle:()=>d,default:()=>x,frontMatter:()=>c,metadata:()=>r,toc:()=>p});const r=JSON.parse('{"id":"get-started/install","title":"Install OpenRAG with TUI","description":"Install the OpenRAG Python wheel, and then run the OpenRAG Terminal User Interface(TUI) to start your OpenRAG deployment with a guided setup process.","source":"@site/docs/get-started/install.mdx","sourceDirName":"get-started","slug":"/install","permalink":"/install","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/install.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG with TUI","slug":"/install"},"sidebar":"tutorialSidebar","previous":{"title":"About OpenRAG","permalink":"/"},"next":{"title":"Install OpenRAG containers","permalink":"/get-started/docker"}}');var i=s(4848),t=s(8453),o=s(1470),l=s(9365),a=s(887);const c={title:"Install OpenRAG with TUI",slug:"/install"},d=void 0,h={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Install the OpenRAG Python wheel",id:"install-python-wheel",level:2},{value:"Set up OpenRAG with the TUI",id:"setup",level:2},...a.RM,{value:"Manage OpenRAG containers with the TUI",id:"manage-openrag-containers-with-the-tui",level:2},{value:"Start container 
services",id:"start-container-services",level:3},{value:"Start native services",id:"start-native-services",level:3},{value:"Status",id:"status",level:3},{value:"Diagnostics",id:"diagnostics",level:2}];function u(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",img:"img",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,t.R)(),...e.components},{Details:r}=n;return r||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,i.jsxs)(i.Fragment,{children:[(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"#install-python-wheel",children:"Install the OpenRAG Python wheel"}),", and then run the ",(0,i.jsx)(n.a,{href:"#setup",children:"OpenRAG Terminal User Interface(TUI)"})," to start your OpenRAG deployment with a guided setup process."]}),"\n",(0,i.jsx)(n.p,{children:"The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal, on any operating system."}),"\n",(0,i.jsx)(n.p,{children:(0,i.jsx)(n.img,{alt:"OpenRAG TUI Interface",src:s(5689).A+"",width:"1995",height:"1099"})}),"\n",(0,i.jsxs)(n.p,{children:["Instead of starting OpenRAG using Docker commands and manually editing values in the ",(0,i.jsx)(n.code,{children:".env"})," file, the TUI walks you through the setup. It prompts for variables where required, creates a ",(0,i.jsx)(n.code,{children:".env"})," file for you, and then starts OpenRAG."]}),"\n",(0,i.jsx)(n.p,{children:"Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs."}),"\n",(0,i.jsxs)(n.p,{children:["If you prefer running Podman or Docker containers and manually editing ",(0,i.jsx)(n.code,{children:".env"})," files, see ",(0,i.jsx)(n.a,{href:"/get-started/docker",children:"Install OpenRAG Containers"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python Version 3.10 to 3.13"})]}),"\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"})]}),"\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"})]}),"\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". If using Podman, use ",(0,i.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:"podman-compose"})," or alias Docker compose commands to Podman commands."]}),"\n",(0,i.jsxs)(n.li,{children:["Create an ",(0,i.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),". This key is ",(0,i.jsx)(n.strong,{children:"required"})," to start OpenRAG, but you can choose a different model provider during ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,i.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. 
If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment."]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"install-python-wheel",children:"Install the OpenRAG Python wheel"}),"\n",(0,i.jsx)(n.p,{children:"The OpenRAG wheel installs the Terminal User Interface (TUI) for configuring and running OpenRAG."}),"\n",(0,i.jsxs)(n.p,{children:["To quickly install and start OpenRAG, run ",(0,i.jsx)(n.code,{children:"uvx openrag"}),"."]}),"\n",(0,i.jsx)(n.p,{children:"To first set up a project and then install OpenRAG, do the following:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Create a new project with a virtual environment using ",(0,i.jsx)(n.code,{children:"uv init"}),"."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv init YOUR_PROJECT_NAME\ncd YOUR_PROJECT_NAME\n"})}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"(venv)"})," prompt doesn't change, but ",(0,i.jsx)(n.code,{children:"uv"})," commands will automatically use the project's virtual environment.\nFor more information on virtual environments, see the ",(0,i.jsx)(n.a,{href:"https://docs.astral.sh/uv/pip/environments",children:"uv documentation"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Ensure all dependencies are installed and updated in your virtual environment."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Install and start the OpenRAG TUI."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx openrag\n"})}),"\n",(0,i.jsxs)(n.p,{children:["To install a specific version of the Langflow package, add the required version to the command, such as ",(0,i.jsx)(n.code,{children:"uvx --from openrag==0.1.25 openrag"}),"."]}),"\n",(0,i.jsxs)(r,{closed:!0,children:[(0,i.jsx)("summary",{children:"Install a local wheel without uvx"}),(0,i.jsx)(n.p,{children:"If you downloaded the OpenRAG wheel to your local machine, follow these steps:"}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Add the wheel to your project's virtual environment."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add PATH/TO/openrag-VERSION-py3-none-any.whl\n"})}),"\n",(0,i.jsxs)(n.p,{children:["Replace ",(0,i.jsx)(n.code,{children:"PATH/TO/"})," and ",(0,i.jsx)(n.code,{children:"VERSION"})," with the path and version of your downloaded OpenRAG ",(0,i.jsx)(n.code,{children:".whl"})," file."]}),"\n",(0,i.jsxs)(n.p,{children:["For example, if your ",(0,i.jsx)(n.code,{children:".whl"})," file is in the ",(0,i.jsx)(n.code,{children:"~/Downloads"})," directory:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add ~/Downloads/openrag-0.1.8-py3-none-any.whl\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#setup",children:"Set up OpenRAG with the TUI"}),"."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"setup",children:"Set up OpenRAG with the TUI"}),"\n",(0,i.jsxs)(n.p,{children:["The TUI creates a ",(0,i.jsx)(n.code,{children:".env"})," file in your 
OpenRAG directory root and starts OpenRAG.\nIf the TUI detects a ",(0,i.jsx)(n.code,{children:".env"})," file in the OpenRAG root directory, it sources any variables from the ",(0,i.jsx)(n.code,{children:".env"})," file.\nIf the TUI detects OAuth credentials, it enforces the ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," path."]}),"\n",(0,i.jsxs)(o.A,{groupId:"Setup method",children:[(0,i.jsxs)(l.A,{value:"Basic setup",label:"Basic setup",default:!0,children:[(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Basic Setup"})," generates all of the required values for OpenRAG except the OpenAI API key.\n",(0,i.jsx)(n.strong,{children:"Basic Setup"})," does not set up OAuth connections for ingestion from cloud providers.\nFor OAuth setup, use ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),".\nFor information about the difference between basic (no auth) and OAuth in OpenRAG, see ",(0,i.jsx)(n.a,{href:"/knowledge#auth",children:"Authentication and document access"}),"."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Basic Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," or press ",(0,i.jsx)("kbd",{children:"1"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n",(0,i.jsx)(n.li,{children:"Paste your OpenAI API key in the OpenAI API key field."}),"\n",(0,i.jsxs)(n.li,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),".\nYour passwords are saved in the ",(0,i.jsx)(n.code,{children:".env"})," file used to start OpenRAG."]}),"\n",(0,i.jsxs)(n.li,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start Container Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:","\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["To open the OpenRAG application, click ",(0,i.jsx)(n.strong,{children:"Open App"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n"]})]}),(0,i.jsx)(l.A,{value:"Advanced setup",label:"Advanced setup",children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," or press ",(0,i.jsx)("kbd",{children:"2"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Paste your OpenAI API key in the OpenAI API key field."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Add your client and secret values for Google or Microsoft OAuth.\nThese values can be found with your OAuth provider.\nFor more information, see the ",(0,i.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client"})," or ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client"})," 
documentation."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"The OpenRAG TUI presents redirect URIs for your OAuth app.\nThese are the URLs your OAuth provider will redirect back to after user sign-in.\nRegister these redirect values with your OAuth provider as they are presented in the TUI."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start Container Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To open the OpenRAG application, click ",(0,i.jsx)(n.strong,{children:"Open App"}),", press ",(0,i.jsx)("kbd",{children:"6"}),", or navigate to ",(0,i.jsx)(n.code,{children:"http://localhost:3000"}),".\nYou are presented with your provider's OAuth sign-in screen.\nAfter sign-in, you are redirected to the redirect URI."]}),"\n",(0,i.jsx)(n.p,{children:"Two additional variables are available for Advanced Setup:"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"LANGFLOW_PUBLIC_URL"})," controls where the Langflow web interface can be accessed. This is where users interact with their flows in a browser."]}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"WEBHOOK_BASE_URL"})," controls where the endpoint for ",(0,i.jsx)(n.code,{children:"/connectors/CONNECTOR_TYPE/webhook"})," will be available.\nThis connection enables real-time document synchronization with external services.\nSupported webhook endpoints:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["Google Drive: ",(0,i.jsx)(n.code,{children:"/connectors/google_drive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["OneDrive: ",(0,i.jsx)(n.code,{children:"/connectors/onedrive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["SharePoint: ",(0,i.jsx)(n.code,{children:"/connectors/sharepoint/webhook"})]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n"]}),"\n"]})})]}),"\n",(0,i.jsx)(a.Ay,{}),"\n",(0,i.jsx)(n.h2,{id:"manage-openrag-containers-with-the-tui",children:"Manage OpenRAG containers with the TUI"}),"\n",(0,i.jsx)(n.p,{children:"After installation, the TUI can deploy, manage, and upgrade your OpenRAG containers."}),"\n",(0,i.jsx)(n.h3,{id:"start-container-services",children:"Start container services"}),"\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Start Container Services"})," to start the OpenRAG containers.\nThe TUI automatically detects your container runtime, and then checks if your machine has compatible GPU support by checking for ",(0,i.jsx)(n.code,{children:"CUDA"}),", ",(0,i.jsx)(n.code,{children:"NVIDIA_SMI"}),", and Docker/Podman runtime support. 
This check determines which Docker Compose file OpenRAG uses.\nThe TUI then pulls the images and deploys the containers with the following command."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"docker compose up -d\n"})}),"\n",(0,i.jsxs)(n.p,{children:["If images are missing, the TUI runs ",(0,i.jsx)(n.code,{children:"docker compose pull"}),", then runs ",(0,i.jsx)(n.code,{children:"docker compose up -d"}),"."]}),"\n",(0,i.jsx)(n.h3,{id:"start-native-services",children:"Start native services"}),"\n",(0,i.jsxs)(n.p,{children:['A "native" service in OpenRAG refers to a service run natively on your machine, and not within a container.\nThe ',(0,i.jsx)(n.code,{children:"docling serve"})," process is a native service in OpenRAG, because it's a document processing service that is run on your local machine, and controlled separately from the containers."]}),"\n",(0,i.jsxs)(n.p,{children:["To start or stop ",(0,i.jsx)(n.code,{children:"docling serve"})," or any other native services, in the TUI main menu, click ",(0,i.jsx)(n.strong,{children:"Start Native Services"})," or ",(0,i.jsx)(n.strong,{children:"Stop Native Services"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To view the status, port, or PID of a native service, in the TUI main menu, click ",(0,i.jsx)(n.a,{href:"#status",children:"Status"}),"."]}),"\n",(0,i.jsx)(n.h3,{id:"status",children:"Status"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Status"})," menu displays information on your container deployment.\nHere you can check container health, find your service ports, view logs, and upgrade your containers."]}),"\n",(0,i.jsxs)(n.p,{children:["To view streaming logs, select the container you want to view, and press ",(0,i.jsx)("kbd",{children:"l"}),".\nTo copy your logs, click ",(0,i.jsx)(n.strong,{children:"Copy to Clipboard"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To ",(0,i.jsx)(n.strong,{children:"upgrade"})," your containers, click ",(0,i.jsx)(n.strong,{children:"Upgrade"}),".\n",(0,i.jsx)(n.strong,{children:"Upgrade"})," runs ",(0,i.jsx)(n.code,{children:"docker compose pull"})," and then ",(0,i.jsx)(n.code,{children:"docker compose up -d --force-recreate"}),".\nThe first command pulls the latest images of OpenRAG.\nThe second command recreates the containers with your data persisted."]}),"\n",(0,i.jsxs)(n.p,{children:["To ",(0,i.jsx)(n.strong,{children:"reset"})," your containers, click ",(0,i.jsx)(n.strong,{children:"Reset"}),".\nReset gives you a completely fresh start.\nReset deletes all of your data, including OpenSearch data, uploaded documents, and authentication.\n",(0,i.jsx)(n.strong,{children:"Reset"})," runs two commands.\nIt first stops and removes all containers, volumes, and local images."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{children:"docker compose down --volumes --remove-orphans --rmi local\n"})}),"\n",(0,i.jsxs)(n.p,{children:["When the first command is complete, OpenRAG removes any additional Docker objects with ",(0,i.jsx)(n.code,{children:"prune"}),"."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{children:"docker system prune -f\n"})}),"\n",(0,i.jsx)(n.h2,{id:"diagnostics",children:"Diagnostics"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Diagnostics"})," menu provides health monitoring for your container runtimes and monitoring of your OpenSearch security."]})]})}function x(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return 
n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(u,{...e})}):u(e)}},5689:(e,n,s)=>{s.d(n,{A:()=>r});const r=s.p+"assets/images/OpenRAG_TUI_2025-09-10T13_04_11_757637-9441c53ba39162a88ac6c11cbeaed0e0.svg"}}]); \ No newline at end of file diff --git a/assets/js/d0314b07.9ed76f3e.js b/assets/js/d0314b07.9ed76f3e.js new file mode 100644 index 00000000..e71531fb --- /dev/null +++ b/assets/js/d0314b07.9ed76f3e.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[5750],{887:(e,n,s)=>{s.d(n,{Ay:()=>c,RM:()=>l});var r=s(4848),i=s(8453),t=s(1470),o=s(9365);const l=[{value:"Application onboarding",id:"application-onboarding",level:2}];function a(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,r.jsxs)(n.p,{children:["The first time you start OpenRAG, whether using the TUI or a ",(0,r.jsx)(n.code,{children:".env"})," file, it's recommended that you complete application onboarding."]}),"\n",(0,r.jsxs)(n.p,{children:["To skip onboarding, click ",(0,r.jsx)(n.strong,{children:"Skip onboarding"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:["Values from onboarding can be changed later in the OpenRAG ",(0,r.jsx)(n.strong,{children:"Settings"})," page."]}),"\n",(0,r.jsx)(n.p,{children:"Choose one LLM provider and complete only those steps:"}),"\n",(0,r.jsxs)(t.A,{groupId:"Provider",children:[(0,r.jsx)(o.A,{value:"OpenAI",label:"OpenAI",default:!0,children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsx)(o.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Complete the fields for ",(0,r.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", ",(0,r.jsx)(n.strong,{children:"IBM Project ID"}),", and ",(0,r.jsx)(n.strong,{children:"IBM API key"}),".\nThese values are found in your IBM watsonx deployment."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click 
",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsxs)(o.A,{value:"Ollama",label:"Ollama",children:[(0,r.jsx)(n.admonition,{type:"tip",children:(0,r.jsxs)(n.p,{children:["Ollama is not included with OpenRAG. To install Ollama, see the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG automatically transforms ",(0,r.jsx)(n.code,{children:"localhost"})," to access services outside of the container, and sends a test connection to your Ollama server to confirm connectivity."]}),"\n",(0,r.jsxs)(n.li,{children:["Select the ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.\nOpenRAG retrieves the available models from your Ollama server."]}),"\n",(0,r.jsxs)(n.li,{children:["To load 2 sample PDFs, enable ",(0,r.jsx)(n.strong,{children:"Sample dataset"}),".\nThis is recommended, but not required."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function c(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(a,{...e})}):a(e)}},4398:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>h,contentTitle:()=>d,default:()=>x,frontMatter:()=>c,metadata:()=>r,toc:()=>p});const r=JSON.parse('{"id":"get-started/install","title":"Install OpenRAG with TUI","description":"Install the OpenRAG Python wheel, and then run the OpenRAG Terminal User Interface(TUI) to start your OpenRAG deployment with a guided setup process.","source":"@site/docs/get-started/install.mdx","sourceDirName":"get-started","slug":"/install","permalink":"/install","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/install.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG with TUI","slug":"/install"},"sidebar":"tutorialSidebar","previous":{"title":"About OpenRAG","permalink":"/"},"next":{"title":"Install OpenRAG containers","permalink":"/get-started/docker"}}');var i=s(4848),t=s(8453),o=s(1470),l=s(9365),a=s(887);const c={title:"Install OpenRAG with TUI",slug:"/install"},d=void 0,h={},p=[{value:"Prerequisites",id:"prerequisites",level:2},{value:"Install the OpenRAG Python wheel",id:"install-python-wheel",level:2},{value:"Set up OpenRAG with the TUI",id:"setup",level:2},...a.RM,{value:"Manage OpenRAG containers with the TUI",id:"manage-openrag-containers-with-the-tui",level:2},{value:"Start all services",id:"start-all-services",level:3},{value:"Status",id:"status",level:3},{value:"Native services status",id:"native-services-status",level:3},{value:"Diagnostics",id:"diagnostics",level:2}];function 
u(e){const n={a:"a",code:"code",em:"em",h2:"h2",h3:"h3",img:"img",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,t.R)(),...e.components},{Details:r}=n;return r||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,i.jsxs)(i.Fragment,{children:[(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"#install-python-wheel",children:"Install the OpenRAG Python wheel"}),", and then run the ",(0,i.jsx)(n.a,{href:"#setup",children:"OpenRAG Terminal User Interface(TUI)"})," to start your OpenRAG deployment with a guided setup process."]}),"\n",(0,i.jsx)(n.p,{children:"The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal."}),"\n",(0,i.jsx)(n.p,{children:(0,i.jsx)(n.img,{alt:"OpenRAG TUI Interface",src:s(5689).A+"",width:"1995",height:"1099"})}),"\n",(0,i.jsxs)(n.p,{children:["Instead of starting OpenRAG using Docker commands and manually editing values in the ",(0,i.jsx)(n.code,{children:".env"})," file, the TUI walks you through the setup. It prompts for variables where required, creates a ",(0,i.jsx)(n.code,{children:".env"})," file for you, and then starts OpenRAG."]}),"\n",(0,i.jsx)(n.p,{children:"Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs."}),"\n",(0,i.jsxs)(n.p,{children:["If you prefer running Podman or Docker containers and manually editing ",(0,i.jsx)(n.code,{children:".env"})," files, see ",(0,i.jsx)(n.a,{href:"/get-started/docker",children:"Install OpenRAG Containers"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python Version 3.10 to 3.13"})]}),"\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"})]}),"\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"})]}),"\n",(0,i.jsxs)(n.li,{children:["Install ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". If using Podman, use ",(0,i.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:"podman-compose"})," or alias Docker compose commands to Podman commands."]}),"\n",(0,i.jsxs)(n.li,{children:["Create an ",(0,i.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),". This key is ",(0,i.jsx)(n.strong,{children:"required"})," to start OpenRAG, but you can choose a different model provider during ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,i.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. 
If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment."]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"install-python-wheel",children:"Install the OpenRAG Python wheel"}),"\n",(0,i.jsx)(n.p,{children:"The OpenRAG wheel installs the Terminal User Interface (TUI) for configuring and running OpenRAG."}),"\n",(0,i.jsxs)(n.p,{children:["To quickly install and start OpenRAG, run ",(0,i.jsx)(n.code,{children:"uvx openrag"}),"."]}),"\n",(0,i.jsx)(n.p,{children:"To first set up a project and then install OpenRAG, do the following:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Create a new project with a virtual environment using ",(0,i.jsx)(n.code,{children:"uv init"}),"."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv init YOUR_PROJECT_NAME\ncd YOUR_PROJECT_NAME\n"})}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"(venv)"})," prompt doesn't change, but ",(0,i.jsx)(n.code,{children:"uv"})," commands will automatically use the project's virtual environment.\nFor more information on virtual environments, see the ",(0,i.jsx)(n.a,{href:"https://docs.astral.sh/uv/pip/environments",children:"uv documentation"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Ensure all dependencies are installed and updated in your virtual environment."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Install and start the OpenRAG TUI."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx openrag\n"})}),"\n",(0,i.jsxs)(n.p,{children:["To install a specific version of the Langflow package, add the required version to the command, such as ",(0,i.jsx)(n.code,{children:"uvx --from openrag==0.1.25 openrag"}),"."]}),"\n",(0,i.jsxs)(r,{closed:!0,children:[(0,i.jsx)("summary",{children:"Install a local wheel without uvx"}),(0,i.jsx)(n.p,{children:"If you downloaded the OpenRAG wheel to your local machine, follow these steps:"}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Add the wheel to your project's virtual environment."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add PATH/TO/openrag-VERSION-py3-none-any.whl\n"})}),"\n",(0,i.jsxs)(n.p,{children:["Replace ",(0,i.jsx)(n.code,{children:"PATH/TO/"})," and ",(0,i.jsx)(n.code,{children:"VERSION"})," with the path and version of your downloaded OpenRAG ",(0,i.jsx)(n.code,{children:".whl"})," file."]}),"\n",(0,i.jsxs)(n.p,{children:["For example, if your ",(0,i.jsx)(n.code,{children:".whl"})," file is in the ",(0,i.jsx)(n.code,{children:"~/Downloads"})," directory:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add ~/Downloads/openrag-0.1.8-py3-none-any.whl\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#setup",children:"Set up OpenRAG with the TUI"}),"."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"setup",children:"Set up OpenRAG with the TUI"}),"\n",(0,i.jsxs)(n.p,{children:["The TUI creates a ",(0,i.jsx)(n.code,{children:".env"})," file in your 
OpenRAG directory root and starts OpenRAG.\nIf the TUI detects a ",(0,i.jsx)(n.code,{children:".env"})," file in the OpenRAG root directory, it sources any variables from the ",(0,i.jsx)(n.code,{children:".env"})," file.\nIf the TUI detects OAuth credentials, it enforces the ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," path."]}),"\n",(0,i.jsxs)(o.A,{groupId:"Setup method",children:[(0,i.jsxs)(l.A,{value:"Basic setup",label:"Basic setup",default:!0,children:[(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Basic Setup"})," generates all of the required values for OpenRAG except the OpenAI API key.\n",(0,i.jsx)(n.strong,{children:"Basic Setup"})," does not set up OAuth connections for ingestion from cloud providers.\nFor OAuth setup, use ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),".\nFor information about the difference between basic (no auth) and OAuth in OpenRAG, see ",(0,i.jsx)(n.a,{href:"/knowledge#auth",children:"Authentication and document access"}),"."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Basic Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," or press ",(0,i.jsx)("kbd",{children:"1"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow.\nOnly the ",(0,i.jsx)(n.strong,{children:"OpenSearch Admin Password"})," and ",(0,i.jsx)(n.strong,{children:"OpenAI API key"})," are required.\nTo generate the optional ",(0,i.jsx)(n.strong,{children:"Langflow Admin Password"}),", click ",(0,i.jsx)(n.strong,{children:"Generate Password"}),"."]}),"\n",(0,i.jsx)(n.li,{children:"Paste your OpenAI API key in the OpenAI API key field."}),"\n",(0,i.jsxs)(n.li,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),".\nYour passwords are saved in the ",(0,i.jsx)(n.code,{children:".env"})," file used to start OpenRAG."]}),"\n",(0,i.jsxs)(n.li,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:","\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["To open the OpenRAG application, click ",(0,i.jsx)(n.strong,{children:"Open App"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n"]})]}),(0,i.jsx)(l.A,{value:"Advanced setup",label:"Advanced setup",children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," or press ",(0,i.jsx)("kbd",{children:"2"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Paste your OpenAI API key in the OpenAI API key field."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Add your client and secret values for Google or Microsoft OAuth.\nThese values can be found with your OAuth provider.\nFor more information, see the 
",(0,i.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client"})," or ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client"})," documentation."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"The OpenRAG TUI presents redirect URIs for your OAuth app.\nThese are the URLs your OAuth provider will redirect back to after user sign-in.\nRegister these redirect values with your OAuth provider as they are presented in the TUI."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To open the OpenRAG application, click ",(0,i.jsx)(n.strong,{children:"Open App"}),".\nYou are presented with your provider's OAuth sign-in screen.\nAfter sign-in, you are redirected to the redirect URI."]}),"\n",(0,i.jsx)(n.p,{children:"Two additional variables are available for Advanced Setup:"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"LANGFLOW_PUBLIC_URL"})," controls where the Langflow web interface can be accessed. This is where users interact with their flows in a browser."]}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"WEBHOOK_BASE_URL"})," controls where the endpoint for ",(0,i.jsx)(n.code,{children:"/connectors/CONNECTOR_TYPE/webhook"})," will be available.\nThis connection enables real-time document synchronization with external services.\nSupported webhook endpoints:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["Google Drive: ",(0,i.jsx)(n.code,{children:"/connectors/google_drive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["OneDrive: ",(0,i.jsx)(n.code,{children:"/connectors/onedrive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["SharePoint: ",(0,i.jsx)(n.code,{children:"/connectors/sharepoint/webhook"})]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"Application Onboarding"}),"."]}),"\n"]}),"\n"]})})]}),"\n",(0,i.jsx)(a.Ay,{}),"\n",(0,i.jsx)(n.h2,{id:"manage-openrag-containers-with-the-tui",children:"Manage OpenRAG containers with the TUI"}),"\n",(0,i.jsx)(n.p,{children:"After installation, the TUI can deploy, manage, and upgrade your OpenRAG containers."}),"\n",(0,i.jsx)(n.h3,{id:"start-all-services",children:"Start all services"}),"\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Start All Services"})," to start the OpenRAG containers.\nThe TUI automatically detects your container runtime, and then checks if your machine has compatible GPU support by checking for ",(0,i.jsx)(n.code,{children:"CUDA"}),", ",(0,i.jsx)(n.code,{children:"NVIDIA_SMI"}),", and Docker/Podman runtime support. 
This check determines which Docker Compose file OpenRAG uses.\nThe TUI then pulls the images and deploys the containers with the following command."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"docker compose up -d\n"})}),"\n",(0,i.jsxs)(n.p,{children:["If images are missing, the TUI runs ",(0,i.jsx)(n.code,{children:"docker compose pull"}),", then runs ",(0,i.jsx)(n.code,{children:"docker compose up -d"}),"."]}),"\n",(0,i.jsx)(n.h3,{id:"status",children:"Status"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Status"})," menu displays information on your container deployment.\nHere you can check container health, find your service ports, view logs, and upgrade your containers."]}),"\n",(0,i.jsxs)(n.p,{children:["To view streaming logs, select the container you want to view, and press ",(0,i.jsx)("kbd",{children:"l"}),".\nTo copy your logs, click ",(0,i.jsx)(n.strong,{children:"Copy to Clipboard"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To ",(0,i.jsx)(n.strong,{children:"upgrade"})," your containers, click ",(0,i.jsx)(n.strong,{children:"Upgrade"}),".\n",(0,i.jsx)(n.strong,{children:"Upgrade"})," runs ",(0,i.jsx)(n.code,{children:"docker compose pull"})," and then ",(0,i.jsx)(n.code,{children:"docker compose up -d --force-recreate"}),".\nThe first command pulls the latest images of OpenRAG.\nThe second command recreates the containers with your data persisted."]}),"\n",(0,i.jsxs)(n.p,{children:["To ",(0,i.jsx)(n.strong,{children:"reset"})," your containers, click ",(0,i.jsx)(n.strong,{children:"Reset"}),".\nReset gives you a completely fresh start.\nReset deletes all of your data, including OpenSearch data, uploaded documents, and authentication.\n",(0,i.jsx)(n.strong,{children:"Reset"})," runs two commands.\nIt first stops and removes all containers, volumes, and local images."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{children:"docker compose down --volumes --remove-orphans --rmi local\n"})}),"\n",(0,i.jsxs)(n.p,{children:["When the first command is complete, OpenRAG removes any additional Docker objects with ",(0,i.jsx)(n.code,{children:"prune"}),"."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{children:"docker system prune -f\n"})}),"\n",(0,i.jsx)(n.h3,{id:"native-services-status",children:"Native services status"}),"\n",(0,i.jsxs)(n.p,{children:["A ",(0,i.jsx)(n.em,{children:"native service"})," in OpenRAG refers to a service run locally on your machine, and not within a container.\nThe ",(0,i.jsx)(n.code,{children:"docling serve"})," process is a native service in OpenRAG, because it's a document processing service that is run on your local machine, and controlled separately from the containers."]}),"\n",(0,i.jsxs)(n.p,{children:["To start or stop ",(0,i.jsx)(n.code,{children:"docling serve"})," or any other native services, in the TUI Status menu, click ",(0,i.jsx)(n.strong,{children:"Stop"})," or ",(0,i.jsx)(n.strong,{children:"Restart"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To view the status, port, or PID of a native service, in the TUI main menu, click ",(0,i.jsx)(n.a,{href:"#status",children:"Status"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"diagnostics",children:"Diagnostics"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Diagnostics"})," menu provides health monitoring for your container runtimes and monitoring of your OpenSearch security."]})]})}function x(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return 
n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(u,{...e})}):u(e)}},5689:(e,n,s)=>{s.d(n,{A:()=>r});const r=s.p+"assets/images/OpenRAG_TUI_2025-09-10T13_04_11_757637-9441c53ba39162a88ac6c11cbeaed0e0.svg"}}]); \ No newline at end of file diff --git a/assets/js/e633a5ea.380cba07.js b/assets/js/e633a5ea.380cba07.js new file mode 100644 index 00000000..d0848a30 --- /dev/null +++ b/assets/js/e633a5ea.380cba07.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[9172],{9359:(e,n,r)=>{r.r(n),r.d(n,{assets:()=>l,contentTitle:()=>a,default:()=>h,frontMatter:()=>o,metadata:()=>s,toc:()=>c});const s=JSON.parse('{"id":"get-started/what-is-openrag","title":"What is OpenRAG?","description":"OpenRAG is an open-source package for building agentic RAG systems that integrates with a wide range of orchestration tools, vector databases, and LLM providers.","source":"@site/docs/get-started/what-is-openrag.mdx","sourceDirName":"get-started","slug":"/","permalink":"/","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/what-is-openrag.mdx","tags":[],"version":"current","frontMatter":{"title":"What is OpenRAG?","slug":"/"},"sidebar":"tutorialSidebar","next":{"title":"Install OpenRAG with TUI","permalink":"/install"}}');var t=r(4848),i=r(8453);const o={title:"What is OpenRAG?",slug:"/"},a=void 0,l={},c=[{value:"OpenRAG architecture",id:"openrag-architecture",level:2},{value:"Performance expectations",id:"performance-expectations",level:2}];function d(e){const n={a:"a",code:"code",h2:"h2",li:"li",mermaid:"mermaid",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsx)(n.p,{children:"OpenRAG is an open-source package for building agentic RAG systems that integrates with a wide range of orchestration tools, vector databases, and LLM providers."}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docs.langflow.org",children:"Langflow"}),": Langflow is a versatile tool for building and deploying AI agents and MCP servers. It supports all major LLMs, vector databases, and a growing library of AI tools."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docs.opensearch.org/latest/",children:"OpenSearch"}),": OpenSearch is a community-driven, Apache 2.0-licensed open source search and analytics suite that makes it easy to ingest, search, visualize, and analyze data."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/",children:"Docling"}),": Docling simplifies document processing, parsing diverse formats \u2014 including advanced PDF understanding \u2014 and providing seamless integrations with the gen AI ecosystem."]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG builds on Langflow's familiar interface while adding OpenSearch for vector storage and Docling for simplified document parsing, with opinionated flows that serve as ready-to-use recipes for ingestion, retrieval, and generation from popular sources like Google Drive, OneDrive, and Sharepoint."}),"\n",(0,t.jsx)(n.p,{children:"What's more, every part of the stack is swappable. 
Write your own custom components in Python, try different language models, and customize your flows to build an agentic RAG system."}),"\n",(0,t.jsxs)(n.p,{children:["Ready to get started? ",(0,t.jsx)(n.a,{href:"/install",children:"Install OpenRAG"})," and then run the ",(0,t.jsx)(n.a,{href:"/quickstart",children:"Quickstart"})," to create a powerful RAG pipeline."]}),"\n",(0,t.jsx)(n.h2,{id:"openrag-architecture",children:"OpenRAG architecture"}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG deploys and orchestrates a lightweight, container-based architecture that combines ",(0,t.jsx)(n.strong,{children:"Langflow"}),", ",(0,t.jsx)(n.strong,{children:"OpenSearch"}),", and ",(0,t.jsx)(n.strong,{children:"Docling"})," into a cohesive RAG platform."]}),"\n",(0,t.jsx)(n.mermaid,{value:'%%{init: {\'theme\': \'dark\', \'flowchart\': {\'useMaxWidth\': false, \'width\': \'100%\'}}}%%\nflowchart LR\n %% Encapsulate the entire diagram in a rectangle with black background\n subgraph DiagramContainer["OpenRAG Architecture"]\n style DiagramContainer fill:#000000,stroke:#ffffff,color:white,stroke-width:2px\n\n %% Define subgraphs for the different sections\n subgraph LocalService["Local Service"]\n DoclingSrv[Docling Serve]\n style DoclingSrv fill:#a8d1ff,stroke:#0066cc,color:black,stroke-width:2px\n end\n\n subgraph Containers\n Backend["OpenRAG Backend"]\n style Backend fill:#e6ffe6,stroke:#006600,color:black,stroke-width:2px\n Langflow\n style Langflow fill:#e6ffe6,stroke:#006600,color:black,stroke-width:2px\n OpenSearch\n style OpenSearch fill:#e6ffe6,stroke:#006600,color:black,stroke-width:2px\n Frontend["OpenRAG Frontend"]\n style Frontend fill:#ffcc99,stroke:#ff6600,color:black,stroke-width:2px\n end\n\n subgraph ThirdParty["Third Party Services"]\n GoogleDrive["Google Drive"]\n style GoogleDrive fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n OneDrive\n style OneDrive fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n SharePoint["SharePoint"]\n style SharePoint fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n More[...]\n style More fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n end\n\n %% Define connections\n DoclingSrv --\x3e Backend\n GoogleDrive --\x3e Backend\n OneDrive --\x3e Backend\n SharePoint --\x3e Backend\n More --\x3e Backend\n Backend --\x3e Langflow\n Langflow <--\x3e OpenSearch\n Backend <--\x3e Frontend\n\n %% Style subgraphs\n style LocalService fill:#333333,stroke:#666666,color:white,stroke-width:2px\n style Containers fill:#444444,stroke:#666666,color:white,stroke-width:2px\n style ThirdParty fill:#333333,stroke:#666666,color:white,stroke-width:2px\n end'}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenRAG Backend"})," is the central orchestration service that coordinates all other components."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Langflow"})," provides a visual workflow engine for building AI agents, and connects to ",(0,t.jsx)(n.strong,{children:"OpenSearch"})," for vector storage and retrieval."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Docling Serve"})," is a local document processing service managed by the ",(0,t.jsx)(n.strong,{children:"OpenRAG Backend"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Third Party Services"})," like ",(0,t.jsx)(n.strong,{children:"Google Drive"})," connect to the ",(0,t.jsx)(n.strong,{children:"OpenRAG Backend"})," through OAuth authentication, allowing synchronication of cloud storage with the OpenSearch 
knowledge base."]}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenRAG Frontend"})," provides the user interface for interacting with the system."]}),"\n",(0,t.jsx)(n.h2,{id:"performance-expectations",children:"Performance expectations"}),"\n",(0,t.jsx)(n.p,{children:"On a local VM with 7 vCPUs and 8\u202fGiB RAM, OpenRAG ingested approximately 5.03 GB across 1,083 files in about 42 minutes.\nThis equates to approximately 2.4 documents per second."}),"\n",(0,t.jsx)(n.p,{children:"You can generally expect equal or better performance on developer laptops and significantly faster on servers.\nThroughput scales with CPU cores, memory, storage speed, and configuration choices such as embedding model, chunk size and overlap, and concurrency."}),"\n",(0,t.jsx)(n.p,{children:"This test returned 12 errors (approximately 1.1%).\nAll errors were file\u2011specific, and they didn't stop the pipeline."}),"\n",(0,t.jsx)(n.p,{children:"Ingestion dataset:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Total files: 1,083 items mounted"}),"\n",(0,t.jsx)(n.li,{children:"Total size on disk: 5,026,474,862 bytes (approximately 5.03 GB)"}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"Hardware specifications:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Machine: Apple M4 Pro"}),"\n",(0,t.jsxs)(n.li,{children:["Podman VM:","\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["Name: ",(0,t.jsx)(n.code,{children:"podman-machine-default"})]}),"\n",(0,t.jsxs)(n.li,{children:["Type: ",(0,t.jsx)(n.code,{children:"applehv"})]}),"\n",(0,t.jsx)(n.li,{children:"vCPUs: 7"}),"\n",(0,t.jsx)(n.li,{children:"Memory: 8 GiB"}),"\n",(0,t.jsx)(n.li,{children:"Disk size: 100 GiB"}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"Test results:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-text",children:"2025-09-24T22:40:45.542190Z /app/src/main.py:231 Ingesting default documents when ready disable_langflow_ingest=False\n2025-09-24T22:40:45.546385Z /app/src/main.py:270 Using Langflow ingestion pipeline for default documents file_count=1082\n...\n2025-09-24T23:19:44.866365Z /app/src/main.py:351 Langflow ingestion completed success_count=1070 error_count=12 total_files=1082\n"})}),"\n",(0,t.jsx)(n.p,{children:"Elapsed time: ~42 minutes 15 seconds (2,535 seconds)"}),"\n",(0,t.jsx)(n.p,{children:"Throughput: ~2.4 documents/second"})]})}function h(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(d,{...e})}):d(e)}}}]); \ No newline at end of file diff --git a/assets/js/e633a5ea.93136c05.js b/assets/js/e633a5ea.93136c05.js deleted file mode 100644 index 4e9d271d..00000000 --- a/assets/js/e633a5ea.93136c05.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkopenrag_docs=self.webpackChunkopenrag_docs||[]).push([[9172],{9359:(e,n,r)=>{r.r(n),r.d(n,{assets:()=>l,contentTitle:()=>a,default:()=>h,frontMatter:()=>o,metadata:()=>s,toc:()=>c});const s=JSON.parse('{"id":"get-started/what-is-openrag","title":"What is OpenRAG?","description":"OpenRAG is an open-source package for building agentic RAG systems that integrates with a wide range of orchestration tools, vector databases, and LLM 
providers.","source":"@site/docs/get-started/what-is-openrag.mdx","sourceDirName":"get-started","slug":"/","permalink":"/","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/what-is-openrag.mdx","tags":[],"version":"current","frontMatter":{"title":"What is OpenRAG?","slug":"/"},"sidebar":"tutorialSidebar","next":{"title":"Install OpenRAG with TUI","permalink":"/install"}}');var t=r(4848),i=r(8453);const o={title:"What is OpenRAG?",slug:"/"},a=void 0,l={},c=[{value:"OpenRAG architecture",id:"openrag-architecture",level:2},{value:"Performance expectations",id:"performance-expectations",level:2}];function d(e){const n={a:"a",code:"code",h2:"h2",li:"li",mermaid:"mermaid",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsx)(n.p,{children:"OpenRAG is an open-source package for building agentic RAG systems that integrates with a wide range of orchestration tools, vector databases, and LLM providers."}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docs.langflow.org",children:"Langflow"}),": Langflow is a popular tool for building and deploying AI agents and MCP servers. It supports all major LLMs, vector databases, and a growing library of AI tools."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docs.opensearch.org/latest/",children:"OpenSearch"}),": OpenSearch is a community-driven, Apache 2.0-licensed open source search and analytics suite that makes it easy to ingest, search, visualize, and analyze data."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.a,{href:"https://docling-project.github.io/docling/",children:"Docling"}),": Docling simplifies document processing, parsing diverse formats \u2014 including advanced PDF understanding \u2014 and providing seamless integrations with the gen AI ecosystem."]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG builds on Langflow's familiar interface while adding OpenSearch for vector storage and Docling for simplified document parsing, with opinionated flows that serve as ready-to-use recipes for ingestion, retrieval, and generation from popular sources like Google Drive, OneDrive, and Sharepoint."}),"\n",(0,t.jsx)(n.p,{children:"What's more, every part of the stack is swappable. Write your own custom components in Python, try different language models, and customize your flows to build an agentic RAG system."}),"\n",(0,t.jsxs)(n.p,{children:["Ready to get started? 
",(0,t.jsx)(n.a,{href:"/install",children:"Install OpenRAG"})," and then run the ",(0,t.jsx)(n.a,{href:"/quickstart",children:"Quickstart"})," to create a powerful RAG pipeline."]}),"\n",(0,t.jsx)(n.h2,{id:"openrag-architecture",children:"OpenRAG architecture"}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG deploys and orchestrates a lightweight, container-based architecture that combines ",(0,t.jsx)(n.strong,{children:"Langflow"}),", ",(0,t.jsx)(n.strong,{children:"OpenSearch"}),", and ",(0,t.jsx)(n.strong,{children:"Docling"})," into a cohesive RAG platform."]}),"\n",(0,t.jsx)(n.mermaid,{value:'%%{init: {\'theme\': \'dark\', \'flowchart\': {\'useMaxWidth\': false, \'width\': \'100%\'}}}%%\nflowchart LR\n %% Encapsulate the entire diagram in a rectangle with black background\n subgraph DiagramContainer["OpenRAG Architecture"]\n style DiagramContainer fill:#000000,stroke:#ffffff,color:white,stroke-width:2px\n\n %% Define subgraphs for the different sections\n subgraph LocalService["Local Service"]\n DoclingSrv[Docling Serve]\n style DoclingSrv fill:#a8d1ff,stroke:#0066cc,color:black,stroke-width:2px\n end\n\n subgraph Containers\n Backend["OpenRAG Backend"]\n style Backend fill:#e6ffe6,stroke:#006600,color:black,stroke-width:2px\n Langflow\n style Langflow fill:#e6ffe6,stroke:#006600,color:black,stroke-width:2px\n OpenSearch\n style OpenSearch fill:#e6ffe6,stroke:#006600,color:black,stroke-width:2px\n Frontend["OpenRAG Frontend"]\n style Frontend fill:#ffcc99,stroke:#ff6600,color:black,stroke-width:2px\n end\n\n subgraph ThirdParty["Third Party Services"]\n GoogleDrive["Google Drive"]\n style GoogleDrive fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n OneDrive\n style OneDrive fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n SharePoint["SharePoint"]\n style SharePoint fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n More[...]\n style More fill:#f2e6ff,stroke:#6600cc,color:black,stroke-width:2px\n end\n\n %% Define connections\n DoclingSrv --\x3e Backend\n GoogleDrive --\x3e Backend\n OneDrive --\x3e Backend\n SharePoint --\x3e Backend\n More --\x3e Backend\n Backend --\x3e Langflow\n Langflow <--\x3e OpenSearch\n Backend <--\x3e Frontend\n\n %% Style subgraphs\n style LocalService fill:#333333,stroke:#666666,color:white,stroke-width:2px\n style Containers fill:#444444,stroke:#666666,color:white,stroke-width:2px\n style ThirdParty fill:#333333,stroke:#666666,color:white,stroke-width:2px\n end'}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenRAG Backend"})," is the central orchestration service that coordinates all other components."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Langflow"})," provides a visual workflow engine for building AI agents, and connects to ",(0,t.jsx)(n.strong,{children:"OpenSearch"})," for vector storage and retrieval."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Docling Serve"})," is a local document processing service managed by the ",(0,t.jsx)(n.strong,{children:"OpenRAG Backend"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Third Party Services"})," like ",(0,t.jsx)(n.strong,{children:"Google Drive"})," connect to the ",(0,t.jsx)(n.strong,{children:"OpenRAG Backend"})," through OAuth authentication, allowing synchronication of cloud storage with the OpenSearch knowledge base."]}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenRAG Frontend"})," provides the user interface for interacting with the 
system."]}),"\n",(0,t.jsx)(n.h2,{id:"performance-expectations",children:"Performance expectations"}),"\n",(0,t.jsx)(n.p,{children:"On a local VM with 7 vCPUs and 8\u202fGiB RAM, OpenRAG ingested approximately 5.03 GB across 1,083 files in about 42 minutes.\nThis equates to approximately 2.4 documents per second."}),"\n",(0,t.jsx)(n.p,{children:"You can generally expect equal or better performance on developer laptops and significantly faster on servers.\nThroughput scales with CPU cores, memory, storage speed, and configuration choices such as embedding model, chunk size and overlap, and concurrency."}),"\n",(0,t.jsx)(n.p,{children:"This test returned 12 errors (approximately 1.1%).\nAll errors were file\u2011specific, and they didn't stop the pipeline."}),"\n",(0,t.jsx)(n.p,{children:"Ingestion dataset:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Total files: 1,083 items mounted"}),"\n",(0,t.jsx)(n.li,{children:"Total size on disk: 5,026,474,862 bytes (approximately 5.03 GB)"}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"Hardware specifications:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Machine: Apple M4 Pro"}),"\n",(0,t.jsxs)(n.li,{children:["Podman VM:","\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["Name: ",(0,t.jsx)(n.code,{children:"podman-machine-default"})]}),"\n",(0,t.jsxs)(n.li,{children:["Type: ",(0,t.jsx)(n.code,{children:"applehv"})]}),"\n",(0,t.jsx)(n.li,{children:"vCPUs: 7"}),"\n",(0,t.jsx)(n.li,{children:"Memory: 8 GiB"}),"\n",(0,t.jsx)(n.li,{children:"Disk size: 100 GiB"}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"Test results:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-text",children:"2025-09-24T22:40:45.542190Z /app/src/main.py:231 Ingesting default documents when ready disable_langflow_ingest=False\n2025-09-24T22:40:45.546385Z /app/src/main.py:270 Using Langflow ingestion pipeline for default documents file_count=1082\n...\n2025-09-24T23:19:44.866365Z /app/src/main.py:351 Langflow ingestion completed success_count=1070 error_count=12 total_files=1082\n"})}),"\n",(0,t.jsx)(n.p,{children:"Elapsed time: ~42 minutes 15 seconds (2,535 seconds)"}),"\n",(0,t.jsx)(n.p,{children:"Throughput: ~2.4 documents/second"})]})}function h(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(d,{...e})}):d(e)}}}]); \ No newline at end of file diff --git a/assets/js/runtime~main.610e9562.js b/assets/js/runtime~main.610e9562.js deleted file mode 100644 index 8d51be94..00000000 --- a/assets/js/runtime~main.610e9562.js +++ /dev/null @@ -1 +0,0 @@ -(()=>{"use strict";var e,a,t,r,o,n={},c={};function d(e){var a=c[e];if(void 0!==a)return a.exports;var t=c[e]={id:e,loaded:!1,exports:{}};return n[e].call(t.exports,t,t.exports,d),t.loaded=!0,t.exports}d.m=n,d.c=c,e=[],d.O=(a,t,r,o)=>{if(!t){var n=1/0;for(b=0;b=o)&&Object.keys(d.O).every(e=>d.O[e](t[f]))?t.splice(f--,1):(c=!1,o0&&e[b-1][2]>o;b--)e[b]=e[b-1];e[b]=[t,r,o]},d.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return d.d(a,{a:a}),a},t=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,d.t=function(e,r){if(1&r&&(e=this(e)),8&r)return e;if("object"==typeof e&&e){if(4&r&&e.__esModule)return e;if(16&r&&"function"==typeof e.then)return e}var o=Object.create(null);d.r(o);var n={};a=a||[null,t({}),t([]),t(t)];for(var c=2&r&&e;("object"==typeof c||"function"==typeof c)&&!~a.indexOf(c);c=t(c))Object.getOwnPropertyNames(c).forEach(a=>n[a]=()=>e[a]);return 
n.default=()=>e,d.d(o,n),o},d.d=(e,a)=>{for(var t in a)d.o(a,t)&&!d.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:a[t]})},d.f={},d.e=e=>Promise.all(Object.keys(d.f).reduce((a,t)=>(d.f[t](e,a),a),[])),d.u=e=>"assets/js/"+({98:"af920ffe",571:"0ba6a408",1567:"22dd74f7",2076:"common",2272:"749371cc",2668:"eb5b356a",5742:"aba21aa0",5750:"d0314b07",6919:"ca2c3c0c",7098:"a7bd4aaa",8401:"17896441",9026:"c8078f0a",9048:"a94703ab",9172:"e633a5ea",9532:"33362219",9647:"5e95c892"}[e]||e)+"."+{98:"02db3b49",165:"c26d6a3e",291:"cd97fb9c",571:"29066fa2",617:"1447cf1d",1e3:"3d4d9f77",1203:"6c86c874",1567:"383b817e",1741:"070da722",1746:"4b809a87",2076:"3a9d628e",2130:"0c95b4e7",2237:"b77c091f",2272:"9415889d",2279:"0f9c36da",2291:"f017bd44",2325:"d0a48f1d",2334:"dbc94d93",2668:"0d98cd06",2821:"5850139d",3488:"7023792e",3490:"618f86cf",3815:"503d6709",4250:"12e4bf74",4379:"c2ae56a8",4616:"28af5598",4802:"bbffcf8d",4981:"1178b3ff",5480:"82213497",5742:"35622335",5750:"068d04d8",5901:"a84db486",5955:"6630df13",5996:"4315eaf2",6241:"70145387",6319:"c05e2240",6366:"03bf2589",6567:"44bf886a",6919:"71b9827a",6992:"ce79b8a2",7098:"bc099609",7592:"35906688",7873:"ad98bc5d",7928:"25b35ee3",8142:"538be8cd",8249:"7b519ea1",8401:"11b57c0a",8565:"cea3e8a9",8756:"37ec9e08",9026:"4aee576d",9032:"8d15afa4",9048:"bfa519e3",9172:"93136c05",9412:"74eee71d",9510:"f78dc8f8",9532:"d707ff03",9647:"251a99dd"}[e]+".js",d.miniCssF=e=>{},d.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),d.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),r={},o="openrag-docs:",d.l=(e,a,t,n)=>{if(r[e])r[e].push(a);else{var c,f;if(void 0!==t)for(var i=document.getElementsByTagName("script"),b=0;b{c.onerror=c.onload=null,clearTimeout(s);var o=r[e];if(delete r[e],c.parentNode&&c.parentNode.removeChild(c),o&&o.forEach(e=>e(t)),a)return a(t)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:c}),12e4);c.onerror=l.bind(null,c.onerror),c.onload=l.bind(null,c.onload),f&&document.head.appendChild(c)}},d.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},d.p="/",d.gca=function(e){return e={17896441:"8401",33362219:"9532",af920ffe:"98","0ba6a408":"571","22dd74f7":"1567",common:"2076","749371cc":"2272",eb5b356a:"2668",aba21aa0:"5742",d0314b07:"5750",ca2c3c0c:"6919",a7bd4aaa:"7098",c8078f0a:"9026",a94703ab:"9048",e633a5ea:"9172","5e95c892":"9647"}[e]||e,d.p+d.u(e)},(()=>{var e={5354:0,1869:0};d.f.j=(a,t)=>{var r=d.o(e,a)?e[a]:void 0;if(0!==r)if(r)t.push(r[2]);else if(/^(1869|5354)$/.test(a))e[a]=0;else{var o=new Promise((t,o)=>r=e[a]=[t,o]);t.push(r[2]=o);var n=d.p+d.u(a),c=new Error;d.l(n,t=>{if(d.o(e,a)&&(0!==(r=e[a])&&(e[a]=void 0),r)){var o=t&&("load"===t.type?"missing":t.type),n=t&&t.target&&t.target.src;c.message="Loading chunk "+a+" failed.\n("+o+": "+n+")",c.name="ChunkLoadError",c.type=o,c.request=n,r[1](c)}},"chunk-"+a,a)}},d.O.j=a=>0===e[a];var a=(a,t)=>{var r,o,n=t[0],c=t[1],f=t[2],i=0;if(n.some(a=>0!==e[a])){for(r in c)d.o(c,r)&&(d.m[r]=c[r]);if(f)var b=f(d)}for(a&&a(t);i{"use strict";var e,a,t,r,o,c={},n={};function f(e){var a=n[e];if(void 0!==a)return a.exports;var t=n[e]={id:e,loaded:!1,exports:{}};return c[e].call(t.exports,t,t.exports,f),t.loaded=!0,t.exports}f.m=c,f.c=n,e=[],f.O=(a,t,r,o)=>{if(!t){var 
c=1/0;for(b=0;b=o)&&Object.keys(f.O).every(e=>f.O[e](t[d]))?t.splice(d--,1):(n=!1,o0&&e[b-1][2]>o;b--)e[b]=e[b-1];e[b]=[t,r,o]},f.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return f.d(a,{a:a}),a},t=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,f.t=function(e,r){if(1&r&&(e=this(e)),8&r)return e;if("object"==typeof e&&e){if(4&r&&e.__esModule)return e;if(16&r&&"function"==typeof e.then)return e}var o=Object.create(null);f.r(o);var c={};a=a||[null,t({}),t([]),t(t)];for(var n=2&r&&e;("object"==typeof n||"function"==typeof n)&&!~a.indexOf(n);n=t(n))Object.getOwnPropertyNames(n).forEach(a=>c[a]=()=>e[a]);return c.default=()=>e,f.d(o,c),o},f.d=(e,a)=>{for(var t in a)f.o(a,t)&&!f.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:a[t]})},f.f={},f.e=e=>Promise.all(Object.keys(f.f).reduce((a,t)=>(f.f[t](e,a),a),[])),f.u=e=>"assets/js/"+({98:"af920ffe",571:"0ba6a408",1567:"22dd74f7",2076:"common",2272:"749371cc",2668:"eb5b356a",5742:"aba21aa0",5750:"d0314b07",6919:"ca2c3c0c",7098:"a7bd4aaa",8401:"17896441",9026:"c8078f0a",9048:"a94703ab",9172:"e633a5ea",9532:"33362219",9647:"5e95c892"}[e]||e)+"."+{98:"059a83cf",165:"c26d6a3e",291:"cd97fb9c",571:"b13d5c3c",617:"1447cf1d",1e3:"3d4d9f77",1203:"6c86c874",1567:"9190bfb2",1741:"070da722",1746:"4b809a87",2076:"3a9d628e",2130:"0c95b4e7",2237:"b77c091f",2272:"8584455d",2279:"0f9c36da",2291:"f017bd44",2325:"d0a48f1d",2334:"dbc94d93",2668:"0d98cd06",2821:"5850139d",3488:"7023792e",3490:"618f86cf",3815:"503d6709",4250:"12e4bf74",4379:"c2ae56a8",4616:"28af5598",4802:"bbffcf8d",4981:"1178b3ff",5480:"82213497",5742:"35622335",5750:"9ed76f3e",5901:"a84db486",5955:"6630df13",5996:"4315eaf2",6241:"70145387",6319:"c05e2240",6366:"03bf2589",6567:"44bf886a",6919:"4d983365",6992:"ce79b8a2",7098:"bc099609",7592:"35906688",7873:"ad98bc5d",7928:"25b35ee3",8142:"538be8cd",8249:"7b519ea1",8401:"11b57c0a",8565:"cea3e8a9",8756:"37ec9e08",9026:"4aee576d",9032:"8d15afa4",9048:"bfa519e3",9172:"380cba07",9412:"74eee71d",9510:"f78dc8f8",9532:"667bb0cb",9647:"251a99dd"}[e]+".js",f.miniCssF=e=>{},f.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),f.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),r={},o="openrag-docs:",f.l=(e,a,t,c)=>{if(r[e])r[e].push(a);else{var n,d;if(void 0!==t)for(var i=document.getElementsByTagName("script"),b=0;b{n.onerror=n.onload=null,clearTimeout(s);var o=r[e];if(delete r[e],n.parentNode&&n.parentNode.removeChild(n),o&&o.forEach(e=>e(t)),a)return a(t)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:n}),12e4);n.onerror=l.bind(null,n.onerror),n.onload=l.bind(null,n.onload),d&&document.head.appendChild(n)}},f.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},f.p="/",f.gca=function(e){return e={17896441:"8401",33362219:"9532",af920ffe:"98","0ba6a408":"571","22dd74f7":"1567",common:"2076","749371cc":"2272",eb5b356a:"2668",aba21aa0:"5742",d0314b07:"5750",ca2c3c0c:"6919",a7bd4aaa:"7098",c8078f0a:"9026",a94703ab:"9048",e633a5ea:"9172","5e95c892":"9647"}[e]||e,f.p+f.u(e)},(()=>{var e={5354:0,1869:0};f.f.j=(a,t)=>{var r=f.o(e,a)?e[a]:void 0;if(0!==r)if(r)t.push(r[2]);else if(/^(1869|5354)$/.test(a))e[a]=0;else{var o=new Promise((t,o)=>r=e[a]=[t,o]);t.push(r[2]=o);var c=f.p+f.u(a),n=new Error;f.l(c,t=>{if(f.o(e,a)&&(0!==(r=e[a])&&(e[a]=void 0),r)){var 
o=t&&("load"===t.type?"missing":t.type),c=t&&t.target&&t.target.src;n.message="Loading chunk "+a+" failed.\n("+o+": "+c+")",n.name="ChunkLoadError",n.type=o,n.request=c,r[1](n)}},"chunk-"+a,a)}},f.O.j=a=>0===e[a];var a=(a,t)=>{var r,o,c=t[0],n=t[1],d=t[2],i=0;if(c.some(a=>0!==e[a])){for(r in n)f.o(n,r)&&(f.m[r]=n[r]);if(d)var b=d(f)}for(a&&a(t);i Install OpenRAG containers | OpenRAG - + @@ -87,7 +87,8 @@ This enables
uv run python scripts/docling_ctl.py stop

Application onboarding

-

The first time you start OpenRAG, whether using the TUI or a .env file, you must complete application onboarding.

+

The first time you start OpenRAG, whether using the TUI or a .env file, it's recommended that you complete application onboarding.

+

To skip onboarding, click Skip onboarding.

Values from onboarding can be changed later in the OpenRAG Settings page.

Choose one LLM provider and complete only those steps:

    @@ -97,14 +98,16 @@ Alternatively, paste an OpenAI API key into the field.
  1. To load 2 sample PDFs, enable Sample dataset. This is recommended, but not required.
  2. Click Complete.
  3. +
  4. To complete the onboarding tasks, click What is OpenRAG, and then click Add a Document.
  5. Continue with the Quickstart.

Container management commands

diff --git a/index.html b/index.html index 10dcf0cc..091afaf9 100644 --- a/index.html +++ b/index.html @@ -4,7 +4,7 @@ What is OpenRAG? | OpenRAG - + @@ -15,7 +15,7 @@

OpenRAG connects and amplifies three popular, proven open-source projects into one powerful platform:

  • -

    Langflow: Langflow is a popular tool for building and deploying AI agents and MCP servers. It supports all major LLMs, vector databases, and a growing library of AI tools.

    +

    Langflow: Langflow is a versatile tool for building and deploying AI agents and MCP servers. It supports all major LLMs, vector databases, and a growing library of AI tools.

  • OpenSearch: OpenSearch is a community-driven, Apache 2.0-licensed open source search and analytics suite that makes it easy to ingest, search, visualize, and analyze data.

    diff --git a/ingestion/index.html b/ingestion/index.html index 4799170f..4e8663d8 100644 --- a/ingestion/index.html +++ b/ingestion/index.html @@ -3,38 +3,36 @@ -Docling in OpenRAG | OpenRAG - +Docling in OpenRAG | OpenRAG + -

    Docling in OpenRAG

    OpenRAG uses Docling for its document ingestion pipeline. +

    Docling in OpenRAG

    OpenRAG uses Docling for document ingestion. More specifically, OpenRAG uses Docling Serve, which starts a docling serve process on your local machine and runs Docling ingestion through an API service.

    Docling ingests documents from your local machine or OAuth connectors, splits them into chunks, and stores them as separate, structured documents in the OpenSearch documents index.
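
    Because each chunk lands in the OpenSearch documents index as a regular document, you can inspect ingestion results directly with the OpenSearch search API. The call below is a minimal sketch only: it assumes OpenSearch is exposed on localhost:9200, that you authenticate as the admin user with the password from your .env file, and the "text" field name is illustrative and may differ from OpenRAG's actual mapping.

      # Sketch: list a few ingested chunks that match a keyword (field name "text" is an assumption)
      curl -sk -u admin:YOUR_OPENSEARCH_ADMIN_PASSWORD \
        "https://localhost:9200/documents/_search?pretty" \
        -H "Content-Type: application/json" \
        -d '{"query": {"match": {"text": "invoice"}}, "size": 3}'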

    OpenRAG chose Docling for its support for a wide variety of file formats, high performance, and advanced understanding of tables and images.

    -

    Docling ingestion settings

    +

    To modify OpenRAG's ingestion settings, including the Docling settings and ingestion flows, click Settings.

    +

    Knowledge ingestion settings

    These settings configure the Docling ingestion parameters.

    OpenRAG will warn you if docling serve is not running. To start or stop docling serve or any other native services, in the TUI main menu, click Start Native Services or Stop Native Services.
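
    If you want to confirm this yourself before ingesting, a quick check from a terminal is enough; this sketch assumes only the process name shown above.

      # Report whether a docling serve process is running on this machine
      pgrep -f "docling serve" || echo "docling serve is not running"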

    -

    Embedding model determines which AI model is used to create vector embeddings. The default is text-embedding-3-small.

    +

    Embedding model determines which AI model is used to create vector embeddings. The default is the OpenAI text-embedding-3-small model.

    Chunk size determines how large each text chunk is in number of characters. Larger chunks yield more context per chunk, but may include irrelevant information. Smaller chunks yield more precise semantic search, but may lack context. The default value of 1000 characters provides a good starting point that balances these considerations.

    Chunk overlap controls the number of characters that overlap over chunk boundaries. Use larger overlap values for documents where context is most important, and use smaller overlap values for simpler documents, or when optimization is most important. The default value of 200 characters of overlap with a chunk size of 1000 (20% overlap) is suitable for general use cases. Decrease the overlap to 10% for a more efficient pipeline, or increase to 40% for more complex documents.
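
    As a quick check of the percentages above, overlap is simply a fraction of the chunk size, so for a 1000-character chunk the suggested settings work out as follows (plain shell arithmetic, no OpenRAG-specific assumptions).

      # 10%, 20%, and 40% overlap for a 1000-character chunk
      CHUNK_SIZE=1000
      for PCT in 10 20 40; do
        echo "${PCT}% overlap = $(( CHUNK_SIZE * PCT / 100 )) characters"
      done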

    +

    Table Structure enables Docling's DocumentConverter tool for parsing tables. Instead of treating tables as plain text, tables are output as structured table data with preserved relationships and metadata. Table Structure is enabled by default.

    OCR enables or disables OCR processing when extracting text from images and scanned documents. OCR is disabled by default. The disabled default is best suited for processing text-based documents as quickly as possible with Docling's DocumentConverter; images are ignored and not processed.

    Enable OCR when you are processing documents containing images with text that requires extraction, or for scanned documents. Enabling OCR can slow ingestion performance.

    If OpenRAG detects that the local machine is running on macOS, OpenRAG uses the ocrmac OCR engine. Other platforms use easyocr.

    Picture descriptions adds image descriptions generated by the SmolVLM-256M-Instruct model to OCR processing. Enabling picture descriptions can slow ingestion performance.

    -

    Use OpenRAG default ingestion instead of Docling serve

    -

    If you want to use OpenRAG's built-in pipeline instead of Docling serve, set DISABLE_INGEST_WITH_LANGFLOW=true in Environment variables.

    -

    The built-in pipeline still uses the Docling processor, but uses it directly without the Docling Serve API.

    -

    For more information, see processors.py in the OpenRAG repository.
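For example, a minimal sketch of this setting, assuming you add it to the .env file that Docker Compose reads (your other variables stay unchanged):

DISABLE_INGEST_WITH_LANGFLOW=true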

    Knowledge ingestion flows

Flows in Langflow are functional representations of application workflows, built from connected component nodes, where each node is a single step in the workflow.

The OpenSearch Ingestion flow is the default knowledge ingestion flow in OpenRAG: when you Add Knowledge in OpenRAG, the OpenSearch Ingestion flow runs in the background. The flow uses Docling Serve to import and process your documents.

    @@ -56,7 +54,11 @@ OpenRAG's visual editor is based on the Open Search Agent flow. The agent calls this component to fetch web content, and the results are ingested into OpenSearch.

    For more on using MCP clients in Langflow, see MCP clients.
    -To connect additional MCP servers to the MCP client, see Connect to MCP servers from your application.

    diff --git a/install/index.html b/install/index.html index 7bc6b53f..b6c50afd 100644 --- a/install/index.html +++ b/install/index.html @@ -4,7 +4,7 @@ Install OpenRAG with TUI | OpenRAG - + @@ -12,7 +12,7 @@

    Install OpenRAG with TUI

Install the OpenRAG Python wheel, and then run the OpenRAG Terminal User Interface (TUI) to start your OpenRAG deployment with a guided setup process.

    -

    The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal, on any operating system.

    +

    The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal.

    OpenRAG TUI Interface

    Instead of starting OpenRAG using Docker commands and manually editing values in the .env file, the TUI walks you through the setup. It prompts for variables where required, creates a .env file for you, and then starts OpenRAG.

    Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs.

@@ -72,11 +72,13 @@ If the TUI detects OAuth credentials, it enforces the Advanced Setup. For information about the difference between basic (no auth) and OAuth in OpenRAG, see Authentication and document access.

    1. To install OpenRAG with Basic Setup, click Basic Setup or press 1.
    2. -
    3. Click Generate Passwords to generate passwords for OpenSearch and Langflow.
    4. +
    5. Click Generate Passwords to generate passwords for OpenSearch and Langflow. +Only the OpenSearch Admin Password and OpenAI API key are required. +To generate the optional Langflow Admin Password, click Generate Password.
    6. Paste your OpenAI API key in the OpenAI API key field.
    7. Click Save Configuration. Your passwords are saved in the .env file used to start OpenRAG.
    8. -
    9. To start OpenRAG, click Start Container Services. +
    10. To start OpenRAG, click Start All Services. Startup pulls container images and runs them, so it can take some time. When startup is complete, the TUI displays the following:
      Services started successfully
      Command completed successfully
      @@ -107,13 +109,13 @@ Register these redirect values with your OAuth provider as they are presented in

      Click Save Configuration.

    11. -

      To start OpenRAG, click Start Container Services. +

      To start OpenRAG, click Start All Services. Startup pulls container images and runs them, so it can take some time. When startup is complete, the TUI displays the following:

      Services started successfully
      Command completed successfully
    12. -

      To open the OpenRAG application, click Open App, press 6, or navigate to http://localhost:3000. +

      To open the OpenRAG application, click Open App. You are presented with your provider's OAuth sign-in screen. After sign-in, you are redirected to the redirect URI.

      Two additional variables are available for Advanced Setup:

      @@ -132,7 +134,8 @@ Supported webhook endpoints:

    Application onboarding

    -

    The first time you start OpenRAG, whether using the TUI or a .env file, you must complete application onboarding.

    +

    The first time you start OpenRAG, whether using the TUI or a .env file, it's recommended that you complete application onboarding.

    +

    To skip onboarding, click Skip onboarding.

    Values from onboarding can be changed later in the OpenRAG Settings page.

    Choose one LLM provider and complete only those steps:

      @@ -142,14 +145,16 @@ Alternatively, paste an OpenAI API key into the field.
    1. To load 2 sample PDFs, enable Sample dataset. This is recommended, but not required.
    2. Click Complete.
    3. +
    4. To complete the onboarding tasks, click What is OpenRAG, and then click Add a Document.
    5. Continue with the Quickstart.

    Manage OpenRAG containers with the TUI

    After installation, the TUI can deploy, manage, and upgrade your OpenRAG containers.

    -

    Start container services

    -

    Click Start Container Services to start the OpenRAG containers. +

    Start all services

    +

Click Start All Services to start the OpenRAG containers. The TUI automatically detects your container runtime, and then checks whether your machine has compatible GPU support by looking for CUDA, nvidia-smi, and Docker/Podman runtime support. This check determines which Docker Compose file OpenRAG uses. The TUI then pulls the images and deploys the containers with the following command.

    docker compose up -d

    If images are missing, the TUI runs docker compose pull, then runs docker compose up -d.
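If you prefer to run the equivalent steps manually, for example while debugging startup, the sequence is roughly the following; running it from your OpenRAG project directory is an assumption about where your Compose file lives:

docker compose pull   # only needed when images are missing
docker compose up -d  # start the OpenRAG containers in the background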

    -

    Start native services

    -

    A "native" service in OpenRAG refers to a service run natively on your machine, and not within a container. -The docling serve process is a native service in OpenRAG, because it's a document processing service that is run on your local machine, and controlled separately from the containers.

    -

    To start or stop docling serve or any other native services, in the TUI main menu, click Start Native Services or Stop Native Services.

    -

    To view the status, port, or PID of a native service, in the TUI main menu, click Status.

    Status

    The Status menu displays information on your container deployment. Here you can check container health, find your service ports, view logs, and upgrade your containers.

    @@ -192,8 +193,13 @@ It first stops and removes all containers, volumes, and local images.

    docker compose down --volumes --remove-orphans --rmi local

    When the first command is complete, OpenRAG removes any additional Docker objects with prune.

    docker system prune -f
    +

    Native services status

    +

A native service in OpenRAG is a service that runs locally on your machine rather than within a container. +The docling serve process is a native service because it's a document processing service that runs on your local machine and is controlled separately from the containers.

    +

    To start or stop docling serve or any other native services, in the TUI Status menu, click Stop or Restart.

    +

    To view the status, port, or PID of a native service, in the TUI main menu, click Status.

    Diagnostics

    -

The Diagnostics menu provides health monitoring for your container runtimes and for your OpenSearch security.

    diff --git a/knowledge/index.html b/knowledge/index.html index 49466208..afb43a21 100644 --- a/knowledge/index.html +++ b/knowledge/index.html @@ -4,7 +4,7 @@ OpenSearch in OpenRAG | OpenRAG - + @@ -24,10 +24,11 @@ To configure the knowledge ingestion pipeline parameters, see Direct file ingestion

    The Knowledge Ingest flow uses Langflow's File component to split and embed files loaded from your local machine into the OpenSearch database.

By default, the ./documents folder in your OpenRAG project directory is mounted to the /app/documents/ directory inside the Docker container. Files added on the host or in the container are visible in both locations. To configure this location, modify the Documents Paths variable in either the TUI's Advanced Setup menu or in the .env used by Docker Compose.
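As a rough sketch, this mount corresponds to a Docker Compose volume entry like the following; the service name openrag is an assumption, so check the Compose file generated for your deployment:

services:
  openrag:
    volumes:
      - ./documents:/app/documents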

    -

    To load and process a single file from the mapped location, click Add Knowledge, and then click Add File. +

    To load and process a single file from the mapped location, click Add Knowledge, and then click File. The file is loaded into your OpenSearch database, and appears in the Knowledge page.

    -

    To load and process a directory from the mapped location, click Add Knowledge, and then click Process Folder. +

    To load and process a directory from the mapped location, click Add Knowledge, and then click Folder. The files are loaded into your OpenSearch database, and appear in the Knowledge page.

    +

    To add files directly to a chat session, click in the chat input and select the files you want to include. Files added this way are processed and made available to the agent for the current conversation, and are not permanently added to the knowledge base.

    Ingest files through OAuth connectors

OpenRAG supports Google Drive, OneDrive, and SharePoint as OAuth connectors for seamless document synchronization.

    OAuth integration allows individual users to connect their personal cloud storage accounts to OpenRAG. Each user must separately authorize OpenRAG to access their own cloud storage files. When a user connects a cloud service, they are redirected to authenticate with that service provider and grant OpenRAG permission to sync documents from their personal cloud storage.

    @@ -44,20 +45,21 @@ The TUI generates a new .env file with your OAuth values.
  • Click Start Container Services.
  • The OpenRAG frontend at http://localhost:3000 now redirects to an OAuth callback login page for your OAuth provider. A successful authentication opens OpenRAG with the required scopes for your connected storage.

    To add knowledge from an OAuth-connected storage provider, do the following:

      -
    1. Click Add Knowledge, and then select the storage provider, for example, Google Drive. +
    2. Click Add Knowledge, and then select the storage provider, for example, Google Drive. The Add Cloud Knowledge page opens.
    3. -
    4. To add files or folders from the connected storage, click Add Files. +
    5. To add files or folders from the connected storage, click Add Files. Select the files or folders you want and click Select. -You can select multiples.
    6. +You can select multiple files.
    7. When your files are selected, click Ingest Files. The ingestion process may take some time, depending on the size of your documents.
    8. When ingestion is complete, your documents are available in the Knowledge screen.
    9. @@ -77,32 +79,33 @@ Knowledge filters are saved search configurations that allow you to create custo

      To create a knowledge filter, do the following:

      1. -

        Click All Knowledge, and then click Create New Filter. -The Create New Knowledge Filter pane appears.

        +

        Click Knowledge, and then click Knowledge Filters. +The Knowledge Filter pane appears.

      2. -

        Enter a Name and Description, and then click Create Filter. -A new filter is created with default settings that match everything.

        +

        Enter a Name and Description, and then click Create Filter. +A new filter is created with default settings that match all documents.

      3. -

        To modify the default filter, click All Knowledge, and then click your new filter to edit it in the Knowledge Filter pane.

        +

        To modify the filter, click Knowledge, and then click your new filter to edit it in the Knowledge Filter pane.

        The following filter options are configurable.

        • Search Query: Enter text for semantic search, such as "financial reports from Q4".
        • Data Sources: Select specific data sources or folders to include.
        • Document Types: Filter by file type.
        • Owners: Filter by who uploaded the documents.
        • -
        • Sources: Filter by connector types, such as local upload or Google Drive.
        • -
        • Result Limit: Set maximum number of results. The default is 10.
        • +
        • Connectors: Filter by connector types, such as local upload or Google Drive.
        • +
        • Response Limit: Set maximum number of results. The default is 10.
        • Score Threshold: Set minimum relevance score. The default score is 0.
      4. -

        When you're done editing the filter, click Save Configuration.

        +

        When you're done editing the filter, click Update Filter.

      5. -

        To apply the filter to OpenRAG globally, click All Knowledge, and then select the filter to apply.

        -

        To apply the filter to a single chat session, in the Chat window, click @, and then select the filter to apply.

        +

To apply the filter to OpenRAG globally, click Knowledge, and then select the filter to apply. Only one filter can be enabled at a time.

        +

        To apply the filter to a single chat session, in the Chat window, click , and then select the filter to apply.

        +

        To delete the filter, in the Knowledge Filter pane, click Delete Filter.

      OpenRAG default configuration

      diff --git a/quickstart/index.html b/quickstart/index.html index ca7e9ad2..75ef9bc9 100644 --- a/quickstart/index.html +++ b/quickstart/index.html @@ -3,15 +3,15 @@ -Quickstart | OpenRAG - +Quickstart | OpenRAG + -

      Quickstart

      Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the OpenRAG API.

      +

      Quickstart

      Get started with OpenRAG by loading your knowledge, swapping out your language model, and then chatting with the Langflow API.

      Prerequisites

      • Install and start OpenRAG with the TUI or Docker
      • @@ -20,23 +20,23 @@
        1. In OpenRAG, click Chat. The chat is powered by the OpenRAG OpenSearch Agent. -For more information, see Langflow Agents.
        2. +For more information, see Langflow in OpenRAG.
        3. Ask What documents are available to you? The agent responds with a message summarizing the documents that OpenRAG loads by default. Knowledge is stored in OpenSearch. -For more information, see Knowledge.
        4. +For more information, see OpenSearch in OpenRAG.
        5. To confirm the agent is correct about the default knowledge, click Knowledge. The Knowledge page lists the documents OpenRAG has ingested into the OpenSearch vector database. -Click on a document to display the chunks derived from splitting the default documents into the vector database.
        6. -
        7. To add documents to your knowledge base, click Add Knowledge. +Click on a document to display the chunks derived from splitting the default documents into the OpenSearch vector database.
        8. +
        9. To add documents to your knowledge base, click Add Knowledge.
            -
          • Select Add File to add a single file from your local machine.
          • -
          • Select Process Folder to process an entire folder of documents from your local machine.
          • +
          • Select File to add a single file from your local machine.
          • +
          • Select Folder to process an entire folder of documents from your local machine. The default directory is /documents in your OpenRAG directory.
          • Select your cloud storage provider to add knowledge from an OAuth-connected storage provider. For more information, see OAuth ingestion.
        10. Return to the Chat window and ask a question about your loaded data. -For example, with a manual about a PC tablet loaded, ask How do I connect this device to WiFI? +For example, with a manual about a PC tablet loaded, ask How do I connect this device to WiFi? The agent responds with a message indicating it now has your knowledge as context for answering questions.
        11. Click Function Call: search_documents (tool_call). This log describes how the agent uses tools. @@ -48,11 +48,13 @@ This is helpful for troubleshooting when the agent isn't responding as expe
          1. To edit the Agent's behavior, click Edit in Langflow. -You can more quickly access the Language Model and Agent Instructions fields in this page, but for illustration purposes, navigate to the Langflow visual builder.

+You can access the Language Model and Agent Instructions fields more quickly on this page, but for illustration purposes, navigate to the Langflow visual builder. +To revert the flow to its initial state, click Restore flow.

          2. -

            OpenRAG warns you that you're entering Langflow. Click Proceed. -The OpenRAG OpenSearch Agent flow appears in a new browser window. +

            OpenRAG warns you that you're entering Langflow. Click Proceed.

            +

            If Langflow requests login information, enter the LANGFLOW_SUPERUSER and LANGFLOW_SUPERUSER_PASSWORD from the .env file in your OpenRAG directory.
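These credentials live in that .env file; as a sketch, the entries look like the following, where both values are placeholders for whatever was set or generated during your setup:

LANGFLOW_SUPERUSER=<your-langflow-username>
LANGFLOW_SUPERUSER_PASSWORD=<your-generated-password>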

            +

            The OpenRAG OpenSearch Agent flow appears in a new browser window. OpenRAG Open Search Agent Flow

          3. diff --git a/reference/configuration/index.html b/reference/configuration/index.html index cd39f581..a8315d3a 100644 --- a/reference/configuration/index.html +++ b/reference/configuration/index.html @@ -4,7 +4,7 @@ Environment variables | OpenRAG - + diff --git a/support/troubleshoot/index.html b/support/troubleshoot/index.html index 78d73094..675744ac 100644 --- a/support/troubleshoot/index.html +++ b/support/troubleshoot/index.html @@ -4,7 +4,7 @@ Troubleshooting | OpenRAG - +