diff --git a/404.html b/404.html index 8d1c5c46..fceec17b 100644 --- a/404.html +++ b/404.html @@ -4,7 +4,7 @@ OpenRAG - + diff --git a/agents/index.html b/agents/index.html index 26491e45..b7ad1f71 100644 --- a/agents/index.html +++ b/agents/index.html @@ -4,7 +4,7 @@ Use Langflow in OpenRAG | OpenRAG - + diff --git a/assets/js/0ba6a408.dd2bbad9.js b/assets/js/0ba6a408.dd2bbad9.js deleted file mode 100644 index e09fd8f4..00000000 --- a/assets/js/0ba6a408.dd2bbad9.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[571],{3227:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>h,contentTitle:()=>c,default:()=>g,frontMatter:()=>l,metadata:()=>t,toc:()=>u});const t=JSON.parse('{"id":"core-components/knowledge","title":"Configure knowledge","description":"OpenRAG includes a built-in OpenSearch instance that serves as the underlying datastore for your knowledge (documents).","source":"@site/docs/core-components/knowledge.mdx","sourceDirName":"core-components","slug":"/knowledge","permalink":"/knowledge","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/knowledge.mdx","tags":[],"version":"current","frontMatter":{"title":"Configure knowledge","slug":"/knowledge"},"sidebar":"tutorialSidebar","previous":{"title":"Flows","permalink":"/agents"},"next":{"title":"Ingest knowledge","permalink":"/ingestion"}}');var i=s(4848),o=s(8453),r=s(7733),a=s(1470),d=s(9365);const l={title:"Configure knowledge",slug:"/knowledge"},c=void 0,h={},u=[{value:"Browse knowledge",id:"browse-knowledge",level:2},{value:"OpenSearch authentication and document access",id:"auth",level:2},{value:"OpenSearch indexes",id:"opensearch-indexes",level:2},{value:"Knowledge ingestion settings",id:"knowledge-ingestion-settings",level:2},{value:"Set the embedding model and dimensions",id:"set-the-embedding-model-and-dimensions",level:3},{value:"Set Docling parameters",id:"set-docling-parameters",level:3},{value:"Set the local documents path",id:"set-the-local-documents-path",level:3},{value:"Delete knowledge",id:"delete-knowledge",level:2},{value:"See also",id:"see-also",level:2}];function p(e){const n={a:"a",admonition:"admonition",code:"code",em:"em",h2:"h2",h3:"h3",li:"li",p:"p",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsxs)(n.p,{children:["OpenRAG includes a built-in ",(0,i.jsx)(n.a,{href:"https://docs.opensearch.org/latest/",children:"OpenSearch"})," instance that serves as the underlying datastore for your ",(0,i.jsx)(n.em,{children:"knowledge"})," (documents).\nThis specialized database is used to store and retrieve your documents and the associated vector data (embeddings)."]}),"\n",(0,i.jsxs)(n.p,{children:["The documents in your OpenSearch knowledge base provide specialized context in addition to the general knowledge available to the language model that you select when you ",(0,i.jsx)(n.a,{href:"/install",children:"install OpenRAG"})," or ",(0,i.jsx)(n.a,{href:"/agents",children:"edit a flow"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["You can ",(0,i.jsx)(n.a,{href:"/ingestion",children:"upload documents"})," from a variety of sources to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.\nDocuments are processed through OpenRAG's knowledge ingestion flows with Docling."]}),"\n",(0,i.jsxs)(n.p,{children:["Then, the ",(0,i.jsxs)(n.a,{href:"/chat",children:["OpenRAG 
",(0,i.jsx)(n.strong,{children:"Chat"})]})," can run ",(0,i.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity searches"})," against your OpenSearch database to retrieve relevant information and generate context-aware responses."]}),"\n",(0,i.jsxs)(n.p,{children:["You can configure how documents are ingested and how the ",(0,i.jsx)(n.strong,{children:"Chat"})," interacts with your knowledge base."]}),"\n",(0,i.jsx)(n.h2,{id:"browse-knowledge",children:"Browse knowledge"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Knowledge"})," page lists the documents OpenRAG has ingested into your OpenSearch database, specifically in an ",(0,i.jsx)(n.a,{href:"https://docs.opensearch.org/latest/getting-started/intro/#index",children:"OpenSearch index"})," named ",(0,i.jsx)(n.code,{children:"documents"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To explore the raw contents of your knowledge base, click ",(0,i.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,i.jsx)(n.strong,{children:"Knowledge"})," to get a list of all ingested documents.\nClick a document to view the chunks produced from splitting the document during ingestion."]}),"\n",(0,i.jsxs)(n.p,{children:["OpenRAG includes some initial documents about OpenRAG. You can use these documents to ask OpenRAG about itself, and to test the ",(0,i.jsx)(n.a,{href:"/chat",children:(0,i.jsx)(n.strong,{children:"Chat"})})," feature before uploading your own documents.\nIf you ",(0,i.jsx)(n.a,{href:"#delete-knowledge",children:"delete these documents"}),", you won't be able to ask OpenRAG about itself and it's own functionality.\nIt is recommended that you keep these documents, and use ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"filters"})," to separate them from your other knowledge."]}),"\n",(0,i.jsx)(n.h2,{id:"auth",children:"OpenSearch authentication and document access"}),"\n",(0,i.jsxs)(n.p,{children:["When you ",(0,i.jsx)(n.a,{href:"/install",children:"install OpenRAG"}),", you can choose between two setup modes: ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," and ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),".\nThe mode you choose determines how OpenRAG authenticates with OpenSearch and controls access to documents:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Basic Setup (no-auth mode)"}),": If you choose ",(0,i.jsx)(n.strong,{children:"Basic Setup"}),", then OpenRAG is installed in no-auth mode.\nThis mode uses one, anonymous JWT token for OpenSearch authentication.\nThere is no differentiation between users.\nAll users that access your OpenRAG instance can access all documents uploaded to your OpenSearch knowledge base."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Advanced Setup (OAuth mode)"}),": If you choose ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),", then OpenRAG is installed in OAuth mode.\nThis mode uses a unique JWT token for each OpenRAG user, and each document is tagged with user ownership. 
Documents are filtered by user owner.\nThis means users see only the documents that they uploaded or have access to."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:["You can enable OAuth mode after installation.\nFor more information, see ",(0,i.jsx)(n.a,{href:"/ingestion#oauth-ingestion",children:"Ingest files with OAuth connectors"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"opensearch-indexes",children:"OpenSearch indexes"}),"\n",(0,i.jsxs)(n.p,{children:["An ",(0,i.jsx)(n.a,{href:"https://docs.opensearch.org/latest/getting-started/intro/#index",children:"OpenSearch index"})," is a collection of documents in an OpenSearch database."]}),"\n",(0,i.jsxs)(n.p,{children:["By default, all documents you upload to your OpenRAG knowledge base are stored in an index named ",(0,i.jsx)(n.code,{children:"documents"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["It is possible to change the index name by ",(0,i.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"editing the ingestion flow"}),".\nHowever, this can impact dependent processes, such as the ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"filters"})," and ",(0,i.jsx)(n.a,{href:"/chat",children:(0,i.jsx)(n.strong,{children:"Chat"})})," flow, that reference the ",(0,i.jsx)(n.code,{children:"documents"})," index by default.\nMake sure you edit other flows as needed to ensure all processes use the same index name."]}),"\n",(0,i.jsxs)(n.p,{children:["If you encounter errors or unexpected behavior after changing the index name, you can ",(0,i.jsx)(n.a,{href:"/agents#revert-a-built-in-flow-to-its-original-configuration",children:"revert the flows to their original configuration"}),", or ",(0,i.jsx)(n.a,{href:"/knowledge#delete-knowledge",children:"delete knowledge"})," to clear the existing documents from your knowledge base."]}),"\n",(0,i.jsx)(n.h2,{id:"knowledge-ingestion-settings",children:"Knowledge ingestion settings"}),"\n",(0,i.jsx)(n.admonition,{type:"warning",children:(0,i.jsx)(n.p,{children:"Knowledge ingestion settings apply to documents you upload after making the changes.\nDocuments uploaded before changing these settings aren't reprocessed."})}),"\n",(0,i.jsx)(n.p,{children:"After changing knowledge ingestion settings, you must determine if you need to reupload any documents to be consistent with the new settings."}),"\n",(0,i.jsx)(n.p,{children:"It isn't always necessary to reupload documents after changing knowledge ingestion settings.\nFor example, it is typical to upload some documents with OCR enabled and others without OCR enabled."}),"\n",(0,i.jsxs)(n.p,{children:["If needed, you can use ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"filters"})," to separate documents that you uploaded with different settings, such as different embedding models."]}),"\n",(0,i.jsx)(n.h3,{id:"set-the-embedding-model-and-dimensions",children:"Set the embedding model and dimensions"}),"\n",(0,i.jsxs)(n.p,{children:["When you ",(0,i.jsx)(n.a,{href:"/install",children:"install OpenRAG"}),", you select at least one embedding model during ",(0,i.jsx)(n.a,{href:"/install#application-onboarding",children:"application onboarding"}),".\nOpenRAG automatically detects and configures the appropriate vector dimensions for your selected embedding model, ensuring optimal search performance and compatibility."]}),"\n",(0,i.jsxs)(n.p,{children:["In the OpenRAG repository, you can find the complete list of supported models in 
",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py",children:(0,i.jsx)(n.code,{children:"models_service.py"})})," and the corresponding vector dimensions in ",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py",children:(0,i.jsx)(n.code,{children:"settings.py"})}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["During application onboarding, you can select from the supported models.\nThe default embedding dimension is ",(0,i.jsx)(n.code,{children:"1536"}),", and the default model is the OpenAI ",(0,i.jsx)(n.code,{children:"text-embedding-3-small"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["If you want to use an unsupported model, you must manually set the model in your ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG configuration"}),".\nIf you use an unsupported embedding model that doesn't have defined dimensions in ",(0,i.jsx)(n.code,{children:"settings.py"}),", then OpenRAG falls back to the default dimensions (1536) and logs a warning. OpenRAG's OpenSearch instance and flows continue to work, but ",(0,i.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity search"})," quality can be affected if the actual model dimensions aren't 1536."]}),"\n",(0,i.jsxs)(n.p,{children:["To change the embedding model after onboarding, it is recommended that you modify the embedding model setting in the OpenRAG ",(0,i.jsx)(n.strong,{children:"Settings"})," page or in your ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG configuration"}),".\nThis will automatically update all relevant ",(0,i.jsx)(n.a,{href:"/agents",children:"OpenRAG flows"})," to use the new embedding model configuration."]}),"\n",(0,i.jsx)(n.h3,{id:"set-docling-parameters",children:"Set Docling parameters"}),"\n",(0,i.jsxs)(n.p,{children:["OpenRAG uses ",(0,i.jsx)(n.a,{href:"https://docling-project.github.io/docling/",children:"Docling"})," for document ingestion because it supports many file formats, processes tables and images well, and performs efficiently."]}),"\n",(0,i.jsxs)(n.p,{children:["When you ",(0,i.jsx)(n.a,{href:"/ingestion",children:"upload documents"}),", Docling processes the files, splits them into chunks, and stores them as separate, structured documents in your OpenSearch knowledge base."]}),"\n",(0,i.jsx)(n.p,{children:"You can use either Docling Serve or OpenRAG's built-in Docling ingestion pipeline to process documents."}),"\n",(0,i.jsxs)(a.A,{children:[(0,i.jsx)(d.A,{value:"serve",label:"Docling Serve ingestion",default:!0,children:(0,i.jsxs)(n.p,{children:["By default, OpenRAG uses ",(0,i.jsx)(n.a,{href:"https://github.com/docling-project/docling-serve",children:"Docling Serve"}),".\nThis means that OpenRAG starts a ",(0,i.jsx)(n.code,{children:"docling serve"})," process on your local machine and runs Docling ingestion through an API service."]})}),(0,i.jsxs)(d.A,{value:"docling",label:"Built-in Docling ingestion",children:[(0,i.jsxs)(n.p,{children:["If you want to use OpenRAG's built-in Docling ingestion pipeline instead of the separate Docling Serve service, set ",(0,i.jsx)(n.code,{children:"DISABLE_INGEST_WITH_LANGFLOW=true"})," in your ",(0,i.jsx)(n.a,{href:"/reference/configuration#document-processing",children:"OpenRAG environment variables"}),"."]}),(0,i.jsx)(n.p,{children:"The built-in pipeline uses the Docling processor directly instead of through the Docling Serve API."}),(0,i.jsxs)(n.p,{children:["For the underlying functionality, see 
",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58",children:(0,i.jsx)(n.code,{children:"processors.py"})})," in the OpenRAG repository."]})]})]}),"\n",(0,i.jsxs)(n.p,{children:["To modify the Docling ingestion and embedding parameters, click ",(0,i.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,i.jsx)(n.strong,{children:"Settings"})," in the OpenRAG user interface."]}),"\n",(0,i.jsx)(n.admonition,{type:"tip",children:(0,i.jsxs)(n.p,{children:["OpenRAG warns you if ",(0,i.jsx)(n.code,{children:"docling serve"})," isn't running.\nYou can ",(0,i.jsx)(n.a,{href:"/install#tui-container-management",children:"start and stop OpenRAG services"})," from the TUI main menu with ",(0,i.jsx)(n.strong,{children:"Start Native Services"})," or ",(0,i.jsx)(n.strong,{children:"Stop Native Services"}),"."]})}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Embedding model"}),": Select the model to use to generate vector embeddings for your documents."]}),"\n",(0,i.jsxs)(n.p,{children:["This is initially set during installation.\nThe recommended way to change this setting is in the OpenRAG ",(0,i.jsx)(n.strong,{children:"Settings"})," or your ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG configuration"}),".\nThis will automatically update all relevant ",(0,i.jsx)(n.a,{href:"/agents",children:"OpenRAG flows"})," to use the new embedding model configuration."]}),"\n",(0,i.jsxs)(n.p,{children:["If you uploaded documents prior to changing the embedding model, you can ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"create filters"})," to separate documents embedded with different models, or you can reupload all documents to regenerate embeddings with the new model.\nIf you want to use multiple embeddings models, similarity search (in the ",(0,i.jsx)(n.strong,{children:"Chat"}),") can take longer as it searching each model's embeddings separately."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Chunk size"}),": Set the number of characters for each text chunk when breaking down a file.\nLarger chunks yield more context per chunk, but can include irrelevant information. Smaller chunks yield more precise semantic search, but can lack context.\nThe default value is 1000 characters, which is usually a good balance between context and precision."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Chunk overlap"}),": Set the number of characters to overlap over chunk boundaries.\nUse larger overlap values for documents where context is most important. Use smaller overlap values for simpler documents or when optimization is most important.\nThe default value is 200 characters, which represents an overlap of 20 percent if the ",(0,i.jsx)(n.strong,{children:"Chunk size"})," is 1000. This is suitable for general use. For faster processing, decrease the overlap to approximately 10 percent. For more complex documents where you need to preserve context across chunks, increase it to approximately 40 percent."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Table Structure"}),": Enables Docling's ",(0,i.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,i.jsx)(n.code,{children:"DocumentConverter"})})," tool for parsing tables. 
Instead of treating tables as plain text, tables are output as structured table data with preserved relationships and metadata. This option is enabled by default."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"OCR"}),": Enables Optical Character Recognition (OCR) processing when extracting text from images and ingesting scanned documents. This setting is best suited for processing text-based documents faster with Docling's ",(0,i.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,i.jsx)(n.code,{children:"DocumentConverter"})}),". Images are ignored and not processed."]}),"\n",(0,i.jsx)(n.p,{children:"This option is disabled by default. Enabling OCR can slow ingestion performance."}),"\n",(0,i.jsxs)(n.p,{children:["If OpenRAG detects that the local machine is running on macOS, OpenRAG uses the ",(0,i.jsx)(n.a,{href:"https://www.piwheels.org/project/ocrmac/",children:"ocrmac"})," OCR engine. Other platforms use ",(0,i.jsx)(n.a,{href:"https://www.jaided.ai/easyocr/",children:"easyocr"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Picture descriptions"}),": Only applicable if ",(0,i.jsx)(n.strong,{children:"OCR"})," is enabled. Adds image descriptions generated by the ",(0,i.jsx)(n.a,{href:"https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct",children:(0,i.jsx)(n.code,{children:"SmolVLM-256M-Instruct"})})," model. Enabling picture descriptions can slow ingestion performance."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h3,{id:"set-the-local-documents-path",children:"Set the local documents path"}),"\n",(0,i.jsxs)(n.p,{children:["The default path for local uploads is the ",(0,i.jsx)(n.code,{children:"./openrag-documents"})," subdirectory in your OpenRAG installation directory. This is mounted to the ",(0,i.jsx)(n.code,{children:"/app/openrag-documents/"})," directory inside the OpenRAG container. 
Files added to the host or container directory are visible in both locations."]}),"\n",(0,i.jsxs)(n.p,{children:["To change this location, modify the ",(0,i.jsx)(n.strong,{children:"Documents Paths"})," variable in either the ",(0,i.jsxs)(n.a,{href:"/install#setup",children:[(0,i.jsx)(n.strong,{children:"Advanced Setup"})," menu"]})," or in the ",(0,i.jsx)(n.code,{children:".env"})," used by Docker Compose."]}),"\n",(0,i.jsx)(n.h2,{id:"delete-knowledge",children:"Delete knowledge"}),"\n",(0,i.jsxs)(n.p,{children:["To clear your entire knowledge base, delete the contents of the ",(0,i.jsx)(n.code,{children:"./opensearch-data"})," folder in your OpenRAG installation directory.\nThis is a destructive operation that cannot be undone."]}),"\n",(0,i.jsx)(n.h2,{id:"see-also",children:"See also"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/ingestion",children:"Ingest knowledge"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"Filter knowledge"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/chat",children:"Chat with knowledge"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"})}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(p,{...e})}):p(e)}}}]); \ No newline at end of file diff --git a/assets/js/0ba6a408.eddb3e7e.js b/assets/js/0ba6a408.eddb3e7e.js new file mode 100644 index 00000000..a6471f28 --- /dev/null +++ b/assets/js/0ba6a408.eddb3e7e.js @@ -0,0 +1 @@ +"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[571],{3227:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>h,contentTitle:()=>c,default:()=>g,frontMatter:()=>l,metadata:()=>t,toc:()=>u});const t=JSON.parse('{"id":"core-components/knowledge","title":"Configure knowledge","description":"OpenRAG includes a built-in OpenSearch instance that serves as the underlying datastore for your knowledge (documents).","source":"@site/docs/core-components/knowledge.mdx","sourceDirName":"core-components","slug":"/knowledge","permalink":"/knowledge","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/knowledge.mdx","tags":[],"version":"current","frontMatter":{"title":"Configure knowledge","slug":"/knowledge"},"sidebar":"tutorialSidebar","previous":{"title":"Flows","permalink":"/agents"},"next":{"title":"Ingest knowledge","permalink":"/ingestion"}}');var i=s(4848),o=s(8453),r=s(7733),a=s(1470),d=s(9365);const l={title:"Configure knowledge",slug:"/knowledge"},c=void 0,h={},u=[{value:"Browse knowledge",id:"browse-knowledge",level:2},{value:"OpenSearch authentication and document access",id:"auth",level:2},{value:"OpenSearch indexes",id:"opensearch-indexes",level:2},{value:"Knowledge ingestion settings",id:"knowledge-ingestion-settings",level:2},{value:"Set the embedding model and dimensions",id:"set-the-embedding-model-and-dimensions",level:3},{value:"Set Docling parameters",id:"set-docling-parameters",level:3},{value:"Set the local documents path",id:"set-the-local-documents-path",level:3},{value:"Delete knowledge",id:"delete-knowledge",level:2},{value:"See also",id:"see-also",level:2}];function p(e){const n={a:"a",admonition:"admonition",code:"code",em:"em",h2:"h2",h3:"h3",li:"li",p:"p",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsxs)(n.p,{children:["OpenRAG includes a built-in 
",(0,i.jsx)(n.a,{href:"https://docs.opensearch.org/latest/",children:"OpenSearch"})," instance that serves as the underlying datastore for your ",(0,i.jsx)(n.em,{children:"knowledge"})," (documents).\nThis specialized database is used to store and retrieve your documents and the associated vector data (embeddings)."]}),"\n",(0,i.jsxs)(n.p,{children:["The documents in your OpenSearch knowledge base provide specialized context in addition to the general knowledge available to the language model that you select when you ",(0,i.jsx)(n.a,{href:"/install",children:"install OpenRAG"})," or ",(0,i.jsx)(n.a,{href:"/agents",children:"edit a flow"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["You can ",(0,i.jsx)(n.a,{href:"/ingestion",children:"upload documents"})," from a variety of sources to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.\nDocuments are processed through OpenRAG's knowledge ingestion flows with Docling."]}),"\n",(0,i.jsxs)(n.p,{children:["Then, the ",(0,i.jsxs)(n.a,{href:"/chat",children:["OpenRAG ",(0,i.jsx)(n.strong,{children:"Chat"})]})," can run ",(0,i.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity searches"})," against your OpenSearch database to retrieve relevant information and generate context-aware responses."]}),"\n",(0,i.jsxs)(n.p,{children:["You can configure how documents are ingested and how the ",(0,i.jsx)(n.strong,{children:"Chat"})," interacts with your knowledge base."]}),"\n",(0,i.jsx)(n.h2,{id:"browse-knowledge",children:"Browse knowledge"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Knowledge"})," page lists the documents OpenRAG has ingested into your OpenSearch database, specifically in an ",(0,i.jsx)(n.a,{href:"https://docs.opensearch.org/latest/getting-started/intro/#index",children:"OpenSearch index"})," named ",(0,i.jsx)(n.code,{children:"documents"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To explore the raw contents of your knowledge base, click ",(0,i.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,i.jsx)(n.strong,{children:"Knowledge"})," to get a list of all ingested documents.\nClick a document to view the chunks produced from splitting the document during ingestion."]}),"\n",(0,i.jsxs)(n.p,{children:["By default, OpenRAG includes some initial documents about OpenRAG. 
You can use these documents to ask OpenRAG about itself, and to test the ",(0,i.jsx)(n.a,{href:"/chat",children:(0,i.jsx)(n.strong,{children:"Chat"})})," feature before uploading your own documents.\nIf you ",(0,i.jsx)(n.a,{href:"#delete-knowledge",children:"delete these documents"}),", you won't be able to ask OpenRAG about itself and its own functionality.\nIt is recommended that you keep these documents, and use ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"filters"})," to separate them from your other knowledge."]}),"\n",(0,i.jsx)(n.h2,{id:"auth",children:"OpenSearch authentication and document access"}),"\n",(0,i.jsxs)(n.p,{children:["When you ",(0,i.jsx)(n.a,{href:"/install",children:"install OpenRAG"}),", you can choose between two setup modes: ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," and ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),".\nThe mode you choose determines how OpenRAG authenticates with OpenSearch and controls access to documents:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Basic Setup (no-auth mode)"}),": If you choose ",(0,i.jsx)(n.strong,{children:"Basic Setup"}),", then OpenRAG is installed in no-auth mode.\nThis mode uses one, anonymous JWT token for OpenSearch authentication.\nThere is no differentiation between users.\nAll users that access your OpenRAG instance can access all documents uploaded to your OpenSearch knowledge base."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Advanced Setup (OAuth mode)"}),": If you choose ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),", then OpenRAG is installed in OAuth mode.\nThis mode uses a unique JWT token for each OpenRAG user, and each document is tagged with user ownership. 
Documents are filtered by user owner.\nThis means users see only the documents that they uploaded or have access to."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:["You can enable OAuth mode after installation.\nFor more information, see ",(0,i.jsx)(n.a,{href:"/ingestion#oauth-ingestion",children:"Ingest files with OAuth connectors"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"opensearch-indexes",children:"OpenSearch indexes"}),"\n",(0,i.jsxs)(n.p,{children:["An ",(0,i.jsx)(n.a,{href:"https://docs.opensearch.org/latest/getting-started/intro/#index",children:"OpenSearch index"})," is a collection of documents in an OpenSearch database."]}),"\n",(0,i.jsxs)(n.p,{children:["By default, all documents you upload to your OpenRAG knowledge base are stored in an index named ",(0,i.jsx)(n.code,{children:"documents"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["It is possible to change the index name by ",(0,i.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"editing the ingestion flow"}),".\nHowever, this can impact dependent processes, such as the ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"filters"})," and ",(0,i.jsx)(n.a,{href:"/chat",children:(0,i.jsx)(n.strong,{children:"Chat"})})," flow, that reference the ",(0,i.jsx)(n.code,{children:"documents"})," index by default.\nMake sure you edit other flows as needed to ensure all processes use the same index name."]}),"\n",(0,i.jsxs)(n.p,{children:["If you encounter errors or unexpected behavior after changing the index name, you can ",(0,i.jsx)(n.a,{href:"/agents#revert-a-built-in-flow-to-its-original-configuration",children:"revert the flows to their original configuration"}),", or ",(0,i.jsx)(n.a,{href:"/knowledge#delete-knowledge",children:"delete knowledge"})," to clear the existing documents from your knowledge base."]}),"\n",(0,i.jsx)(n.h2,{id:"knowledge-ingestion-settings",children:"Knowledge ingestion settings"}),"\n",(0,i.jsx)(n.admonition,{type:"warning",children:(0,i.jsx)(n.p,{children:"Knowledge ingestion settings apply to documents you upload after making the changes.\nDocuments uploaded before changing these settings aren't reprocessed."})}),"\n",(0,i.jsx)(n.p,{children:"After changing knowledge ingestion settings, you must determine if you need to reupload any documents to be consistent with the new settings."}),"\n",(0,i.jsx)(n.p,{children:"It isn't always necessary to reupload documents after changing knowledge ingestion settings.\nFor example, it is typical to upload some documents with OCR enabled and others without OCR enabled."}),"\n",(0,i.jsxs)(n.p,{children:["If needed, you can use ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"filters"})," to separate documents that you uploaded with different settings, such as different embedding models."]}),"\n",(0,i.jsx)(n.h3,{id:"set-the-embedding-model-and-dimensions",children:"Set the embedding model and dimensions"}),"\n",(0,i.jsxs)(n.p,{children:["When you ",(0,i.jsx)(n.a,{href:"/install",children:"install OpenRAG"}),", you select at least one embedding model during ",(0,i.jsx)(n.a,{href:"/install#application-onboarding",children:"application onboarding"}),".\nOpenRAG automatically detects and configures the appropriate vector dimensions for your selected embedding model, ensuring optimal search performance and compatibility."]}),"\n",(0,i.jsxs)(n.p,{children:["In the OpenRAG repository, you can find the complete list of supported models in 
",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/services/models_service.py",children:(0,i.jsx)(n.code,{children:"models_service.py"})})," and the corresponding vector dimensions in ",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/config/settings.py",children:(0,i.jsx)(n.code,{children:"settings.py"})}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["During application onboarding, you can select from the supported models.\nThe default embedding dimension is ",(0,i.jsx)(n.code,{children:"1536"}),", and the default model is the OpenAI ",(0,i.jsx)(n.code,{children:"text-embedding-3-small"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["If you want to use an unsupported model, you must manually set the model in your ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG configuration"}),".\nIf you use an unsupported embedding model that doesn't have defined dimensions in ",(0,i.jsx)(n.code,{children:"settings.py"}),", then OpenRAG falls back to the default dimensions (1536) and logs a warning. OpenRAG's OpenSearch instance and flows continue to work, but ",(0,i.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity search"})," quality can be affected if the actual model dimensions aren't 1536."]}),"\n",(0,i.jsxs)(n.p,{children:["To change the embedding model after onboarding, it is recommended that you modify the embedding model setting in the OpenRAG ",(0,i.jsx)(n.strong,{children:"Settings"})," page or in your ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG configuration"}),".\nThis will automatically update all relevant ",(0,i.jsx)(n.a,{href:"/agents",children:"OpenRAG flows"})," to use the new embedding model configuration."]}),"\n",(0,i.jsx)(n.h3,{id:"set-docling-parameters",children:"Set Docling parameters"}),"\n",(0,i.jsxs)(n.p,{children:["OpenRAG uses ",(0,i.jsx)(n.a,{href:"https://docling-project.github.io/docling/",children:"Docling"})," for document ingestion because it supports many file formats, processes tables and images well, and performs efficiently."]}),"\n",(0,i.jsxs)(n.p,{children:["When you ",(0,i.jsx)(n.a,{href:"/ingestion",children:"upload documents"}),", Docling processes the files, splits them into chunks, and stores them as separate, structured documents in your OpenSearch knowledge base."]}),"\n",(0,i.jsx)(n.p,{children:"You can use either Docling Serve or OpenRAG's built-in Docling ingestion pipeline to process documents."}),"\n",(0,i.jsxs)(a.A,{children:[(0,i.jsx)(d.A,{value:"serve",label:"Docling Serve ingestion",default:!0,children:(0,i.jsxs)(n.p,{children:["By default, OpenRAG uses ",(0,i.jsx)(n.a,{href:"https://github.com/docling-project/docling-serve",children:"Docling Serve"}),".\nThis means that OpenRAG starts a ",(0,i.jsx)(n.code,{children:"docling serve"})," process on your local machine and runs Docling ingestion through an API service."]})}),(0,i.jsxs)(d.A,{value:"docling",label:"Built-in Docling ingestion",children:[(0,i.jsxs)(n.p,{children:["If you want to use OpenRAG's built-in Docling ingestion pipeline instead of the separate Docling Serve service, set ",(0,i.jsx)(n.code,{children:"DISABLE_INGEST_WITH_LANGFLOW=true"})," in your ",(0,i.jsx)(n.a,{href:"/reference/configuration#document-processing",children:"OpenRAG environment variables"}),"."]}),(0,i.jsx)(n.p,{children:"The built-in pipeline uses the Docling processor directly instead of through the Docling Serve API."}),(0,i.jsxs)(n.p,{children:["For the underlying functionality, see 
",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/src/models/processors.py#L58",children:(0,i.jsx)(n.code,{children:"processors.py"})})," in the OpenRAG repository."]})]})]}),"\n",(0,i.jsxs)(n.p,{children:["To modify the Docling ingestion and embedding parameters, click ",(0,i.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,i.jsx)(n.strong,{children:"Settings"})," in the OpenRAG user interface."]}),"\n",(0,i.jsx)(n.admonition,{type:"tip",children:(0,i.jsxs)(n.p,{children:["OpenRAG warns you if ",(0,i.jsx)(n.code,{children:"docling serve"})," isn't running.\nYou can ",(0,i.jsx)(n.a,{href:"/install#tui-container-management",children:"start and stop OpenRAG services"})," from the TUI main menu with ",(0,i.jsx)(n.strong,{children:"Start Native Services"})," or ",(0,i.jsx)(n.strong,{children:"Stop Native Services"}),"."]})}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Embedding model"}),": Select the model to use to generate vector embeddings for your documents."]}),"\n",(0,i.jsxs)(n.p,{children:["This is initially set during installation.\nThe recommended way to change this setting is in the OpenRAG ",(0,i.jsx)(n.strong,{children:"Settings"})," or your ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG configuration"}),".\nThis will automatically update all relevant ",(0,i.jsx)(n.a,{href:"/agents",children:"OpenRAG flows"})," to use the new embedding model configuration."]}),"\n",(0,i.jsxs)(n.p,{children:["If you uploaded documents prior to changing the embedding model, you can ",(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"create filters"})," to separate documents embedded with different models, or you can reupload all documents to regenerate embeddings with the new model.\nIf you want to use multiple embeddings models, similarity search (in the ",(0,i.jsx)(n.strong,{children:"Chat"}),") can take longer as it searching each model's embeddings separately."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Chunk size"}),": Set the number of characters for each text chunk when breaking down a file.\nLarger chunks yield more context per chunk, but can include irrelevant information. Smaller chunks yield more precise semantic search, but can lack context.\nThe default value is 1000 characters, which is usually a good balance between context and precision."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Chunk overlap"}),": Set the number of characters to overlap over chunk boundaries.\nUse larger overlap values for documents where context is most important. Use smaller overlap values for simpler documents or when optimization is most important.\nThe default value is 200 characters, which represents an overlap of 20 percent if the ",(0,i.jsx)(n.strong,{children:"Chunk size"})," is 1000. This is suitable for general use. For faster processing, decrease the overlap to approximately 10 percent. For more complex documents where you need to preserve context across chunks, increase it to approximately 40 percent."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Table Structure"}),": Enables Docling's ",(0,i.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,i.jsx)(n.code,{children:"DocumentConverter"})})," tool for parsing tables. 
Instead of treating tables as plain text, tables are output as structured table data with preserved relationships and metadata. This option is enabled by default."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"OCR"}),": Enables Optical Character Recognition (OCR) processing when extracting text from images and ingesting scanned documents. This setting is best suited for processing text-based documents faster with Docling's ",(0,i.jsx)(n.a,{href:"https://docling-project.github.io/docling/reference/document_converter/",children:(0,i.jsx)(n.code,{children:"DocumentConverter"})}),". Images are ignored and not processed."]}),"\n",(0,i.jsx)(n.p,{children:"This option is disabled by default. Enabling OCR can slow ingestion performance."}),"\n",(0,i.jsxs)(n.p,{children:["If OpenRAG detects that the local machine is running on macOS, OpenRAG uses the ",(0,i.jsx)(n.a,{href:"https://www.piwheels.org/project/ocrmac/",children:"ocrmac"})," OCR engine. Other platforms use ",(0,i.jsx)(n.a,{href:"https://www.jaided.ai/easyocr/",children:"easyocr"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Picture descriptions"}),": Only applicable if ",(0,i.jsx)(n.strong,{children:"OCR"})," is enabled. Adds image descriptions generated by the ",(0,i.jsx)(n.a,{href:"https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct",children:(0,i.jsx)(n.code,{children:"SmolVLM-256M-Instruct"})})," model. Enabling picture descriptions can slow ingestion performance."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h3,{id:"set-the-local-documents-path",children:"Set the local documents path"}),"\n",(0,i.jsxs)(n.p,{children:["The default path for local uploads is the ",(0,i.jsx)(n.code,{children:"./openrag-documents"})," subdirectory in your OpenRAG installation directory. This is mounted to the ",(0,i.jsx)(n.code,{children:"/app/openrag-documents/"})," directory inside the OpenRAG container. 
Files added to the host or container directory are visible in both locations."]}),"\n",(0,i.jsxs)(n.p,{children:["To change this location, modify the ",(0,i.jsx)(n.strong,{children:"Documents Paths"})," variable in either the ",(0,i.jsxs)(n.a,{href:"/install#setup",children:[(0,i.jsx)(n.strong,{children:"Advanced Setup"})," menu"]})," or in the ",(0,i.jsx)(n.code,{children:".env"})," used by Docker Compose."]}),"\n",(0,i.jsx)(n.h2,{id:"delete-knowledge",children:"Delete knowledge"}),"\n",(0,i.jsxs)(n.p,{children:["To clear your entire knowledge base, delete the contents of the ",(0,i.jsx)(n.code,{children:"./opensearch-data"})," folder in your OpenRAG installation directory.\nThis is a destructive operation that cannot be undone."]}),"\n",(0,i.jsx)(n.h2,{id:"see-also",children:"See also"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/ingestion",children:"Ingest knowledge"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/knowledge-filters",children:"Filter knowledge"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/chat",children:"Chat with knowledge"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"})}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(p,{...e})}):p(e)}}}]); \ No newline at end of file diff --git a/assets/js/27b4a875.565e61ee.js b/assets/js/27b4a875.565e61ee.js deleted file mode 100644 index db6d3ac7..00000000 --- a/assets/js/27b4a875.565e61ee.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[3207],{2361:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>d,contentTitle:()=>h,default:()=>u,frontMatter:()=>c,metadata:()=>s,toc:()=>p});const s=JSON.parse('{"id":"core-components/chat","title":"Chat in OpenRAG","description":"After you upload documents to your knowledge base, you can use the OpenRAG Chat feature to interact with your knowledge through natural language queries.","source":"@site/docs/core-components/chat.mdx","sourceDirName":"core-components","slug":"/chat","permalink":"/chat","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/chat.mdx","tags":[],"version":"current","frontMatter":{"title":"Chat in OpenRAG","slug":"/chat"},"sidebar":"tutorialSidebar","previous":{"title":"Filter knowledge","permalink":"/knowledge-filters"},"next":{"title":"Environment variables","permalink":"/reference/configuration"}}');var o=t(4848),r=t(8453),i=t(7733),a=(t(1470),t(9365),t(7224)),l=t(8401);const c={title:"Chat in OpenRAG",slug:"/chat"},h=void 0,d={},p=[{value:"OpenRAG OpenSearch Agent flow",id:"flow",level:2},{value:"Nudges",id:"nudges",level:2},{value:"Upload documents to the chat",id:"upload-documents-to-the-chat",level:2},...l.RM,{value:"Inspect tool calls and knowledge",id:"inspect-tool-calls-and-knowledge",level:2},{value:"Integrate OpenRAG chat into an application",id:"integrate-openrag-chat-into-an-application",level:2},...a.RM];function g(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",img:"img",li:"li",p:"p",strong:"strong",ul:"ul",...(0,r.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,o.jsxs)(o.Fragment,{children:[(0,o.jsxs)(n.p,{children:["After you 
",(0,o.jsx)(n.a,{href:"/ingestion",children:"upload documents to your knowledge base"}),", you can use the OpenRAG ",(0,o.jsx)(i.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Chat"})," feature to interact with your knowledge through natural language queries."]}),"\n",(0,o.jsx)(n.admonition,{type:"tip",children:(0,o.jsxs)(n.p,{children:["Try chatting, uploading documents, and modifying chat settings in the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"quickstart"}),"."]})}),"\n",(0,o.jsx)(n.h2,{id:"flow",children:"OpenRAG OpenSearch Agent flow"}),"\n",(0,o.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," ",(0,o.jsx)(n.a,{href:"/agents",children:"flow"})," runs in the background to retrieve relevant information from your knowledge base and generate a response."]}),"\n",(0,o.jsxs)(n.p,{children:["If you ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", you'll see that it is comprised of eight components that work together to ingest chat messages, retrieve relevant information from your knowledge base, and then generate responses."]}),"\n",(0,o.jsx)(n.p,{children:(0,o.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})}),"\n",(0,o.jsxs)(n.ul,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Chat Input"})," component"]}),": This component starts the flow when it receives a chat message. It is connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Input"})," port.\nWhen you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", your chat messages are passed to the ",(0,o.jsx)(n.strong,{children:"Chat Input"})," component, which then sends them to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component for processing."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,o.jsx)(n.strong,{children:"Agent"})," component"]}),": This component orchestrates the entire flow by processing chat messages, searching the knowledge base, and organizing the retrieved information into a cohesive response.\nThe agent's general behavior is defined by the prompt in the ",(0,o.jsx)(n.strong,{children:"Agent Instructions"})," field and the model connected to the ",(0,o.jsx)(n.strong,{children:"Language Model"})," port.\nOne or more specialized tools can be attached to the ",(0,o.jsx)(n.strong,{children:"Tools"})," port to extend the agent's capabilities. In this case, there are two tools: ",(0,o.jsx)(n.strong,{children:"MCP Tools"})," and ",(0,o.jsx)(n.strong,{children:"OpenSearch"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["The ",(0,o.jsx)(n.strong,{children:"Agent"})," component is the star of this flow because it powers decision making, tool calling, and an LLM-driven conversational experience."]}),"\n",(0,o.jsxs)(s,{children:[(0,o.jsx)("summary",{children:"How do agents work?"}),(0,o.jsx)(n.p,{children:"Agents extend Large Language Models (LLMs) by integrating tools, which are functions that provide additional context and enable autonomous task execution. 
These integrations make agents more specialized and powerful than standalone LLMs."}),(0,o.jsx)(n.p,{children:"Whereas an LLM might generate acceptable, inert responses to general queries and tasks, an agent can leverage the integrated context and tools to provide more relevant responses and even take action. For example, you might create an agent that can access your company's documentation, repositories, and other resources to help your team with tasks that require knowledge of your specific products, customers, and code."}),(0,o.jsx)(n.p,{children:"Agents use LLMs as a reasoning engine to process input, determine which actions to take to address the query, and then generate a response. The response could be a typical text-based LLM response, or it could involve an action, like editing a file, running a script, or calling an external API."}),(0,o.jsx)(n.p,{children:"In an agentic context, tools are functions that the agent can run to perform tasks or access external resources. A function is wrapped as a Tool object with a common interface that the agent understands. Agents become aware of tools through tool registration, which is when the agent is provided a list of available tools typically at agent initialization. The Tool object's description tells the agent what the tool can do so that it can decide whether the tool is appropriate for a given request."})]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-models",children:[(0,o.jsx)(n.strong,{children:"Language Model"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Language Model"})," port, this component provides the base language model driver for the agent. The agent cannot function without a model because the model is used for general knowledge, reasoning, and generating responses."]}),"\n",(0,o.jsx)(n.p,{children:"Different models can change the style and content of the agent's responses, and some models might be better suited for certain tasks than others. If the agent doesn't seem to be handling requests well, try changing the model to see how the responses change. For example, fast models might be good for simple queries, but they might not have the depth of reasoning for complex, multi-faceted queries."}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/mcp-client",children:[(0,o.jsx)(n.strong,{children:"MCP Tools"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Tools"})," port, this component can be used to ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/mcp-server",children:"access any Model Context Protocol (MCP) server"})," and the MCP tools provided by that server. In this case, your OpenRAG Langflow instance's ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-flows#projects",children:(0,o.jsx)(n.strong,{children:"Starter Project"})})," is the MCP server, and the ",(0,o.jsxs)(n.a,{href:"/ingestion#url-flow",children:[(0,o.jsx)(n.strong,{children:"OpenSearch URL Ingestion"})," flow"]})," is the MCP tool.\nThis flow fetches content from URLs, and then stores the content in your OpenRAG OpenSearch knowledge base. 
By serving this flow as an MCP tool, the agent can selectively call this tool if a URL is detected in the chat input."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,o.jsx)(n.strong,{children:"OpenSearch"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Tools"})," port, this component lets the agent search your ",(0,o.jsx)(n.a,{href:"/knowledge",children:"OpenRAG OpenSearch knowledge base"}),". The agent might not use this database for every request; the agent uses this connection only if it decides that documents in your knowledge base are relevant to your query."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,o.jsx)(n.strong,{children:"Embedding Model"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component's ",(0,o.jsx)(n.strong,{children:"Embedding"})," port, this component generates embeddings from chat input that are used in ",(0,o.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity search"})," to find content in your knowledge base that is relevant to the chat input. The agent uses this information to generate context-aware responses that are specialized for your data."]}),"\n",(0,o.jsxs)(n.p,{children:["It is critical that the embedding model used here matches the embedding model used when you ",(0,o.jsx)(n.a,{href:"/ingestion",children:"upload documents to your knowledge base"}),". Mismatched models and dimensions can degrade the quality of similarity search results causing the agent to retrieve irrelevant documents from your knowledge base."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Text Input"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component's ",(0,o.jsx)(n.strong,{children:"Search Filters"})," port, this component is populated with a Langflow global variable named ",(0,o.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"}),". 
If a global or chat-level ",(0,o.jsx)(n.a,{href:"/knowledge-filters",children:"knowledge filter"})," is set, then the variable contains the filter expression, which limits the documents that the agent can access in the knowledge base.\nIf no knowledge filter is set, then the ",(0,o.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"})," variable is empty, and the agent can access all documents in the knowledge base."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Chat Output"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Output"})," port, this component returns the agent's generated response as a chat message."]}),"\n"]}),"\n"]}),"\n",(0,o.jsx)(n.h2,{id:"nudges",children:"Nudges"}),"\n",(0,o.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Nudges"})," flow runs in the background to pull additional context from your knowledge base and chat history."]}),"\n",(0,o.jsxs)(n.p,{children:["Nudges appear as prompts in the chat.\nClick a nudge to accept it and provide the nudge's context to the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"})," agent (the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow)."]}),"\n",(0,o.jsxs)(n.p,{children:["Like OpenRAG's other built-in flows, you can ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it if you want to change the nudge behavior."]}),"\n",(0,o.jsx)(n.h2,{id:"upload-documents-to-the-chat",children:"Upload documents to the chat"}),"\n",(0,o.jsx)(l.Ay,{}),"\n",(0,o.jsx)(n.h2,{id:"inspect-tool-calls-and-knowledge",children:"Inspect tool calls and knowledge"}),"\n",(0,o.jsxs)(n.p,{children:["During the chat, you'll see information about the agent's process. For more detail, you can inspect individual tool calls. This is helpful for troubleshooting because it shows you how the agent used particular tools. 
For example, click ",(0,o.jsx)(i.A,{name:"Gear","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Function Call: search_documents (tool_call)"})," to view the log of tool calls made by the agent to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component."]}),"\n",(0,o.jsxs)(n.p,{children:["If documents in your knowledge base seem to be missing or interpreted incorrectly, see ",(0,o.jsx)(n.a,{href:"/ingestion#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["If tool calls and knowledge appear normal, but the agent's responses seem off-topic or incorrect, consider changing the agent's language model or prompt, as explained in ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"}),"."]}),"\n",(0,o.jsx)(n.h2,{id:"integrate-openrag-chat-into-an-application",children:"Integrate OpenRAG chat into an application"}),"\n",(0,o.jsxs)(n.p,{children:["You can integrate OpenRAG flows into your applications using the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Langflow API"}),".\nTo simplify this integration, you can get pre-configured code snippets directly from the embedded Langflow visual editor."]}),"\n",(0,o.jsxs)(n.p,{children:["The following example demonstrates how to generate and use code snippets for the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow:"]}),"\n",(0,o.jsx)(a.Ay,{})]})}function u(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(g,{...e})}):g(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>s});const s=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"},7224:(e,n,t)=>{t.d(n,{Ay:()=>h,RM:()=>l});var s=t(4848),o=t(8453),r=t(7733),i=t(1470),a=t(9365);const l=[];function c(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Open the ",(0,s.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow in the Langflow visual editor: From the ",(0,s.jsx)(n.strong,{children:"Chat"})," window, click ",(0,s.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", click ",(0,s.jsx)(n.strong,{children:"Edit in Langflow"}),", and then click ",(0,s.jsx)(n.strong,{children:"Proceed"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Create a ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication",children:"Langflow API key"}),", which is a user-specific token required to send requests to the Langflow server.\nThis key doesn't grant access to OpenRAG."]}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["In the Langflow visual editor, click your user icon in the header, and then select ",(0,s.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Click ",(0,s.jsx)(n.strong,{children:"Langflow API Keys"}),", and then click ",(0,s.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Add New"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Name your key, and then click ",(0,s.jsx)(n.strong,{children:"Create API Key"}),"."]}),"\n",(0,s.jsx)(n.li,{children:"Copy the API key and store it securely."}),"\n",(0,s.jsxs)(n.li,{children:["Exit the Langflow ",(0,s.jsx)(n.strong,{children:"Settings"})," page to return to the visual 
editor."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Share"}),", and then select ",(0,s.jsx)(n.strong,{children:"API access"})," to get pregenerated code snippets that call the Langflow API and run the flow."]}),"\n",(0,s.jsxs)(n.p,{children:["These code snippets construct API requests with your Langflow server URL (",(0,s.jsx)(n.code,{children:"LANGFLOW_SERVER_ADDRESS"}),"), the flow to run (",(0,s.jsx)(n.code,{children:"FLOW_ID"}),"), required headers (",(0,s.jsx)(n.code,{children:"LANGFLOW_API_KEY"}),", ",(0,s.jsx)(n.code,{children:"Content-Type"}),"), and a payload containing the required inputs to run the flow, including a default chat input message."]}),"\n",(0,s.jsx)(n.p,{children:"In production, you would modify the inputs to suit your application logic. For example, you could replace the default chat input message with dynamic user input."}),"\n",(0,s.jsxs)(i.A,{children:[(0,s.jsx)(a.A,{value:"python",label:"Python",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'import requests\nimport os\nimport uuid\n\napi_key = \'LANGFLOW_API_KEY\'\nurl = "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" # The complete API endpoint URL for this flow\n\n# Request payload configuration\npayload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n}\npayload["session_id"] = str(uuid.uuid4())\n\nheaders = {"x-api-key": api_key}\n\ntry:\n # Send API request\n response = requests.request("POST", url, json=payload, headers=headers)\n response.raise_for_status() # Raise exception for bad status codes\n\n # Print response\n print(response.text)\n\nexcept requests.exceptions.RequestException as e:\n print(f"Error making API request: {e}")\nexcept ValueError as e:\n print(f"Error parsing response: {e}")\n'})})}),(0,s.jsx)(a.A,{value:"typescript",label:"TypeScript",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-typescript",children:'const crypto = require(\'crypto\');\nconst apiKey = \'LANGFLOW_API_KEY\';\nconst payload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n};\npayload.session_id = crypto.randomUUID();\n\nconst options = {\n method: \'POST\',\n headers: {\n \'Content-Type\': \'application/json\',\n "x-api-key": apiKey\n },\n body: JSON.stringify(payload)\n};\n\nfetch(\'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID\', options)\n .then(response => response.json())\n .then(response => console.warn(response))\n .catch(err => console.error(err));\n'})})}),(0,s.jsx)(a.A,{value:"curl",label:"curl",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-bash",children:'curl --request POST \\\n --url \'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID?stream=false\' \\\n --header \'Content-Type: application/json\' \\\n --header "x-api-key: LANGFLOW_API_KEY" \\\n --data \'{\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n }\'\n'})})})]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Copy your preferred snippet, and then run it:"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Python"}),": Paste the snippet into a ",(0,s.jsx)(n.code,{children:".py"})," file, save it, and then run it with ",(0,s.jsx)(n.code,{children:"python filename.py"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"TypeScript"}),": Paste the snippet into a 
",(0,s.jsx)(n.code,{children:".ts"})," file, save it, and then run it with ",(0,s.jsx)(n.code,{children:"ts-node filename.ts"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"curl"}),": Paste and run snippet directly in your terminal."]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.p,{children:"If the request is successful, the response includes many details about the flow run, including the session ID, inputs, outputs, components, durations, and more."}),"\n",(0,s.jsxs)(n.p,{children:["In production, you won't pass the raw response to the user in its entirety.\nInstead, you extract and reformat relevant fields for different use cases, as demonstrated in the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/quickstart#extract-data-from-the-response",children:"Langflow quickstart"}),".\nFor example, you could pass the chat output text to a front-end user-facing application, and store specific fields in logs and backend data stores for monitoring, chat history, or analytics.\nYou could also pass the output from one flow as input to another flow."]})]})}function h(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(c,{...e})}):c(e)}},8401:(e,n,t)=>{t.d(n,{Ay:()=>l,RM:()=>i});var s=t(4848),o=t(8453),r=t(7733);const i=[];function a(e){const n={p:"p",strong:"strong",...(0,o.R)(),...e.components};return(0,s.jsxs)(n.p,{children:["When using the OpenRAG ",(0,s.jsx)(n.strong,{children:"Chat"}),", click ",(0,s.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," in the chat input field to upload a file to the current chat session.\nFiles added this way are processed and made available to the agent for the current conversation only.\nThese files aren't stored in the knowledge base permanently."]})}function l(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(a,{...e})}):a(e)}}}]); \ No newline at end of file diff --git a/assets/js/27b4a875.8de9f81c.js b/assets/js/27b4a875.8de9f81c.js new file mode 100644 index 00000000..e9e45d4a --- /dev/null +++ b/assets/js/27b4a875.8de9f81c.js @@ -0,0 +1 @@ +"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[3207],{2361:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>d,contentTitle:()=>h,default:()=>u,frontMatter:()=>c,metadata:()=>s,toc:()=>p});const s=JSON.parse('{"id":"core-components/chat","title":"Chat in OpenRAG","description":"After you upload documents to your knowledge base, you can use the OpenRAG Chat feature to interact with your knowledge through natural language queries.","source":"@site/docs/core-components/chat.mdx","sourceDirName":"core-components","slug":"/chat","permalink":"/chat","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/chat.mdx","tags":[],"version":"current","frontMatter":{"title":"Chat in OpenRAG","slug":"/chat"},"sidebar":"tutorialSidebar","previous":{"title":"Filter knowledge","permalink":"/knowledge-filters"},"next":{"title":"Environment variables","permalink":"/reference/configuration"}}');var o=t(4848),r=t(8453),a=t(7733),i=(t(1470),t(9365),t(7224)),l=t(8401);const c={title:"Chat in OpenRAG",slug:"/chat"},h=void 0,d={},p=[{value:"OpenRAG OpenSearch Agent flow",id:"flow",level:2},{value:"Nudges",id:"nudges",level:2},{value:"Upload documents to the chat",id:"upload-documents-to-the-chat",level:2},...l.RM,{value:"Inspect tool calls and knowledge",id:"inspect-tool-calls-and-knowledge",level:2},{value:"Integrate OpenRAG chat into an 
application",id:"integrate-openrag-chat-into-an-application",level:2},...i.RM];function g(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",img:"img",li:"li",p:"p",strong:"strong",ul:"ul",...(0,r.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,o.jsxs)(o.Fragment,{children:[(0,o.jsxs)(n.p,{children:["After you ",(0,o.jsx)(n.a,{href:"/ingestion",children:"upload documents to your knowledge base"}),", you can use the OpenRAG ",(0,o.jsx)(a.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Chat"})," feature to interact with your knowledge through natural language queries."]}),"\n",(0,o.jsxs)(n.p,{children:["The OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"})," uses an LLM-powered agent to understand your queries, retrieve relevant information from your knowledge base, and generate context-aware responses.\nThe agent can also fetch information from URLs and new documents that you provide during the chat session.\nTo limit the knowledge available to the agent, use ",(0,o.jsx)(n.a,{href:"/knowledge-filters",children:"filters"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["The agent can call specialized Model Context Protocol (MCP) tools to extend its capabilities.\nTo add or change the available tools, you must edit the ",(0,o.jsxs)(n.a,{href:"#flow",children:[(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow"]}),"."]}),"\n",(0,o.jsx)(n.admonition,{type:"tip",children:(0,o.jsxs)(n.p,{children:["Try chatting, uploading documents, and modifying chat settings in the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"quickstart"}),"."]})}),"\n",(0,o.jsx)(n.h2,{id:"flow",children:"OpenRAG OpenSearch Agent flow"}),"\n",(0,o.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow runs in the background to retrieve relevant information from your knowledge base and generate a response."]}),"\n",(0,o.jsxs)(n.p,{children:["If you ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", you'll see that it is comprised of eight components that work together to ingest chat messages, retrieve relevant information from your knowledge base, and then generate responses.\nWhen you inspect this flow, you can edit the components to customize the agent's behavior."]}),"\n",(0,o.jsx)(n.p,{children:(0,o.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})}),"\n",(0,o.jsxs)(n.ul,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Chat Input"})," component"]}),": This component starts the flow when it receives a chat message. 
It is connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Input"})," port.\nWhen you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", your chat messages are passed to the ",(0,o.jsx)(n.strong,{children:"Chat Input"})," component, which then sends them to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component for processing."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,o.jsx)(n.strong,{children:"Agent"})," component"]}),": This component orchestrates the entire flow by processing chat messages, searching the knowledge base, and organizing the retrieved information into a cohesive response.\nThe agent's general behavior is defined by the prompt in the ",(0,o.jsx)(n.strong,{children:"Agent Instructions"})," field and the model connected to the ",(0,o.jsx)(n.strong,{children:"Language Model"})," port.\nOne or more specialized tools can be attached to the ",(0,o.jsx)(n.strong,{children:"Tools"})," port to extend the agent's capabilities. In this case, there are two tools: ",(0,o.jsx)(n.strong,{children:"MCP Tools"})," and ",(0,o.jsx)(n.strong,{children:"OpenSearch"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["The ",(0,o.jsx)(n.strong,{children:"Agent"})," component is the star of this flow because it powers decision making, tool calling, and an LLM-driven conversational experience."]}),"\n",(0,o.jsxs)(s,{children:[(0,o.jsx)("summary",{children:"How do agents work?"}),(0,o.jsx)(n.p,{children:"Agents extend Large Language Models (LLMs) by integrating tools, which are functions that provide additional context and enable autonomous task execution. These integrations make agents more specialized and powerful than standalone LLMs."}),(0,o.jsx)(n.p,{children:"Whereas an LLM might generate acceptable, inert responses to general queries and tasks, an agent can leverage the integrated context and tools to provide more relevant responses and even take action. For example, you might create an agent that can access your company's documentation, repositories, and other resources to help your team with tasks that require knowledge of your specific products, customers, and code."}),(0,o.jsx)(n.p,{children:"Agents use LLMs as a reasoning engine to process input, determine which actions to take to address the query, and then generate a response. The response could be a typical text-based LLM response, or it could involve an action, like editing a file, running a script, or calling an external API."}),(0,o.jsx)(n.p,{children:"In an agentic context, tools are functions that the agent can run to perform tasks or access external resources. A function is wrapped as a Tool object with a common interface that the agent understands. Agents become aware of tools through tool registration, which is when the agent is provided a list of available tools typically at agent initialization. The Tool object's description tells the agent what the tool can do so that it can decide whether the tool is appropriate for a given request."})]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-models",children:[(0,o.jsx)(n.strong,{children:"Language Model"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Language Model"})," port, this component provides the base language model driver for the agent. 
The agent cannot function without a model because the model is used for general knowledge, reasoning, and generating responses."]}),"\n",(0,o.jsx)(n.p,{children:"Different models can change the style and content of the agent's responses, and some models might be better suited for certain tasks than others. If the agent doesn't seem to be handling requests well, try changing the model to see how the responses change. For example, fast models might be good for simple queries, but they might not have the depth of reasoning for complex, multi-faceted queries."}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/mcp-client",children:[(0,o.jsx)(n.strong,{children:"MCP Tools"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Tools"})," port, this component can be used to ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/mcp-server",children:"access any MCP server"})," and the MCP tools provided by that server. In this case, your OpenRAG Langflow instance's ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-flows#projects",children:(0,o.jsx)(n.strong,{children:"Starter Project"})})," is the MCP server, and the ",(0,o.jsxs)(n.a,{href:"/ingestion#url-flow",children:[(0,o.jsx)(n.strong,{children:"OpenSearch URL Ingestion"})," flow"]})," is the MCP tool.\nThis flow fetches content from URLs, and then stores the content in your OpenRAG OpenSearch knowledge base. By serving this flow as an MCP tool, the agent can selectively call this tool if a URL is detected in the chat input."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,o.jsx)(n.strong,{children:"OpenSearch"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Tools"})," port, this component lets the agent search your ",(0,o.jsx)(n.a,{href:"/knowledge",children:"OpenRAG OpenSearch knowledge base"}),". The agent might not use this database for every request; the agent uses this connection only if it decides that documents in your knowledge base are relevant to your query."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,o.jsx)(n.strong,{children:"Embedding Model"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component's ",(0,o.jsx)(n.strong,{children:"Embedding"})," port, this component generates embeddings from chat input that are used in ",(0,o.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity search"})," to find content in your knowledge base that is relevant to the chat input. The agent uses this information to generate context-aware responses that are specialized for your data."]}),"\n",(0,o.jsxs)(n.p,{children:["It is critical that the embedding model used here matches the embedding model used when you ",(0,o.jsx)(n.a,{href:"/ingestion",children:"upload documents to your knowledge base"}),". 
Mismatched models and dimensions can degrade the quality of similarity search results causing the agent to retrieve irrelevant documents from your knowledge base."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Text Input"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component's ",(0,o.jsx)(n.strong,{children:"Search Filters"})," port, this component is populated with a Langflow global variable named ",(0,o.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"}),". If a global or chat-level ",(0,o.jsx)(n.a,{href:"/knowledge-filters",children:"knowledge filter"})," is set, then the variable contains the filter expression, which limits the documents that the agent can access in the knowledge base.\nIf no knowledge filter is set, then the ",(0,o.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"})," variable is empty, and the agent can access all documents in the knowledge base."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Chat Output"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Output"})," port, this component returns the agent's generated response as a chat message."]}),"\n"]}),"\n"]}),"\n",(0,o.jsx)(n.h2,{id:"nudges",children:"Nudges"}),"\n",(0,o.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Nudges"})," flow runs in the background to pull additional context from your knowledge base and chat history."]}),"\n",(0,o.jsxs)(n.p,{children:["Nudges appear as prompts in the chat.\nClick a nudge to accept it and provide the nudge's context to the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"})," agent (the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow)."]}),"\n",(0,o.jsxs)(n.p,{children:["Like OpenRAG's other built-in flows, you can ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it if you want to change the nudge behavior."]}),"\n",(0,o.jsx)(n.h2,{id:"upload-documents-to-the-chat",children:"Upload documents to the chat"}),"\n",(0,o.jsx)(l.Ay,{}),"\n",(0,o.jsx)(n.h2,{id:"inspect-tool-calls-and-knowledge",children:"Inspect tool calls and knowledge"}),"\n",(0,o.jsxs)(n.p,{children:["During the chat, you'll see information about the agent's process. For more detail, you can inspect individual tool calls. This is helpful for troubleshooting because it shows you how the agent used particular tools. 
For example, click ",(0,o.jsx)(a.A,{name:"Gear","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Function Call: search_documents (tool_call)"})," to view the log of tool calls made by the agent to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component."]}),"\n",(0,o.jsxs)(n.p,{children:["If documents in your knowledge base seem to be missing or interpreted incorrectly, see ",(0,o.jsx)(n.a,{href:"/ingestion#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["If tool calls and knowledge appear normal, but the agent's responses seem off-topic or incorrect, consider changing the agent's language model or prompt, as explained in ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"}),"."]}),"\n",(0,o.jsx)(n.h2,{id:"integrate-openrag-chat-into-an-application",children:"Integrate OpenRAG chat into an application"}),"\n",(0,o.jsxs)(n.p,{children:["You can integrate OpenRAG flows into your applications using the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Langflow API"}),".\nTo simplify this integration, you can get pre-configured code snippets directly from the embedded Langflow visual editor."]}),"\n",(0,o.jsxs)(n.p,{children:["The following example demonstrates how to generate and use code snippets for the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow:"]}),"\n",(0,o.jsx)(i.Ay,{})]})}function u(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(g,{...e})}):g(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>s});const s=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"},7224:(e,n,t)=>{t.d(n,{Ay:()=>h,RM:()=>l});var s=t(4848),o=t(8453),r=t(7733),a=t(1470),i=t(9365);const l=[];function c(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Open the ",(0,s.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow in the Langflow visual editor: From the ",(0,s.jsx)(n.strong,{children:"Chat"})," window, click ",(0,s.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", click ",(0,s.jsx)(n.strong,{children:"Edit in Langflow"}),", and then click ",(0,s.jsx)(n.strong,{children:"Proceed"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Create a ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication",children:"Langflow API key"}),", which is a user-specific token required to send requests to the Langflow server.\nThis key doesn't grant access to OpenRAG."]}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["In the Langflow visual editor, click your user icon in the header, and then select ",(0,s.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Click ",(0,s.jsx)(n.strong,{children:"Langflow API Keys"}),", and then click ",(0,s.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Add New"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Name your key, and then click ",(0,s.jsx)(n.strong,{children:"Create API Key"}),"."]}),"\n",(0,s.jsx)(n.li,{children:"Copy the API key and store it securely."}),"\n",(0,s.jsxs)(n.li,{children:["Exit the Langflow ",(0,s.jsx)(n.strong,{children:"Settings"})," page to return to the visual 
editor."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Share"}),", and then select ",(0,s.jsx)(n.strong,{children:"API access"})," to get pregenerated code snippets that call the Langflow API and run the flow."]}),"\n",(0,s.jsxs)(n.p,{children:["These code snippets construct API requests with your Langflow server URL (",(0,s.jsx)(n.code,{children:"LANGFLOW_SERVER_ADDRESS"}),"), the flow to run (",(0,s.jsx)(n.code,{children:"FLOW_ID"}),"), required headers (",(0,s.jsx)(n.code,{children:"LANGFLOW_API_KEY"}),", ",(0,s.jsx)(n.code,{children:"Content-Type"}),"), and a payload containing the required inputs to run the flow, including a default chat input message."]}),"\n",(0,s.jsx)(n.p,{children:"In production, you would modify the inputs to suit your application logic. For example, you could replace the default chat input message with dynamic user input."}),"\n",(0,s.jsxs)(a.A,{children:[(0,s.jsx)(i.A,{value:"python",label:"Python",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'import requests\nimport os\nimport uuid\n\napi_key = \'LANGFLOW_API_KEY\'\nurl = "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" # The complete API endpoint URL for this flow\n\n# Request payload configuration\npayload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n}\npayload["session_id"] = str(uuid.uuid4())\n\nheaders = {"x-api-key": api_key}\n\ntry:\n # Send API request\n response = requests.request("POST", url, json=payload, headers=headers)\n response.raise_for_status() # Raise exception for bad status codes\n\n # Print response\n print(response.text)\n\nexcept requests.exceptions.RequestException as e:\n print(f"Error making API request: {e}")\nexcept ValueError as e:\n print(f"Error parsing response: {e}")\n'})})}),(0,s.jsx)(i.A,{value:"typescript",label:"TypeScript",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-typescript",children:'const crypto = require(\'crypto\');\nconst apiKey = \'LANGFLOW_API_KEY\';\nconst payload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n};\npayload.session_id = crypto.randomUUID();\n\nconst options = {\n method: \'POST\',\n headers: {\n \'Content-Type\': \'application/json\',\n "x-api-key": apiKey\n },\n body: JSON.stringify(payload)\n};\n\nfetch(\'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID\', options)\n .then(response => response.json())\n .then(response => console.warn(response))\n .catch(err => console.error(err));\n'})})}),(0,s.jsx)(i.A,{value:"curl",label:"curl",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-bash",children:'curl --request POST \\\n --url \'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID?stream=false\' \\\n --header \'Content-Type: application/json\' \\\n --header "x-api-key: LANGFLOW_API_KEY" \\\n --data \'{\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n }\'\n'})})})]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Copy your preferred snippet, and then run it:"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Python"}),": Paste the snippet into a ",(0,s.jsx)(n.code,{children:".py"})," file, save it, and then run it with ",(0,s.jsx)(n.code,{children:"python filename.py"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"TypeScript"}),": Paste the snippet into a 
",(0,s.jsx)(n.code,{children:".ts"})," file, save it, and then run it with ",(0,s.jsx)(n.code,{children:"ts-node filename.ts"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"curl"}),": Paste and run snippet directly in your terminal."]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.p,{children:"If the request is successful, the response includes many details about the flow run, including the session ID, inputs, outputs, components, durations, and more."}),"\n",(0,s.jsxs)(n.p,{children:["In production, you won't pass the raw response to the user in its entirety.\nInstead, you extract and reformat relevant fields for different use cases, as demonstrated in the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/quickstart#extract-data-from-the-response",children:"Langflow quickstart"}),".\nFor example, you could pass the chat output text to a front-end user-facing application, and store specific fields in logs and backend data stores for monitoring, chat history, or analytics.\nYou could also pass the output from one flow as input to another flow."]})]})}function h(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(c,{...e})}):c(e)}},8401:(e,n,t)=>{t.d(n,{Ay:()=>l,RM:()=>a});var s=t(4848),o=t(8453),r=t(7733);const a=[];function i(e){const n={p:"p",strong:"strong",...(0,o.R)(),...e.components};return(0,s.jsxs)(n.p,{children:["When using the OpenRAG ",(0,s.jsx)(n.strong,{children:"Chat"}),", click ",(0,s.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," in the chat input field to upload a file to the current chat session.\nFiles added this way are processed and made available to the agent for the current conversation only.\nThese files aren't stored in the knowledge base permanently."]})}function l(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}}}]); \ No newline at end of file diff --git a/assets/js/71478a5d.57b0036a.js b/assets/js/71478a5d.57b0036a.js new file mode 100644 index 00000000..349e520b --- /dev/null +++ b/assets/js/71478a5d.57b0036a.js @@ -0,0 +1 @@ +"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[5490],{6195:(e,n,l)=>{l.r(n),l.d(n,{assets:()=>d,contentTitle:()=>a,default:()=>u,frontMatter:()=>o,metadata:()=>t,toc:()=>c});const t=JSON.parse('{"id":"core-components/knowledge-filters","title":"Filter knowledge","description":"OpenRAG\'s knowledge filters help you organize and manage your knowledge base by creating pre-defined views of your documents.","source":"@site/docs/core-components/knowledge-filters.mdx","sourceDirName":"core-components","slug":"/knowledge-filters","permalink":"/knowledge-filters","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/knowledge-filters.mdx","tags":[],"version":"current","frontMatter":{"title":"Filter knowledge","slug":"/knowledge-filters"},"sidebar":"tutorialSidebar","previous":{"title":"Ingest knowledge","permalink":"/ingestion"},"next":{"title":"Chat","permalink":"/chat"}}');var s=l(4848),r=l(8453),i=l(7733);l(1470),l(9365);const o={title:"Filter knowledge",slug:"/knowledge-filters"},a=void 0,d={},c=[{value:"Built-in filters",id:"built-in-filters",level:2},{value:"Create a filter",id:"create-a-filter",level:2},{value:"Apply a filter",id:"apply-a-filter",level:2},{value:"Delete a filter",id:"delete-a-filter",level:2}];function h(e){const 
n={a:"a",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",ul:"ul",...(0,r.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsxs)(n.p,{children:["OpenRAG's knowledge filters help you organize and manage your ",(0,s.jsx)(n.a,{href:"/knowledge",children:"knowledge base"})," by creating pre-defined views of your documents."]}),"\n",(0,s.jsx)(n.p,{children:"Each knowledge filter captures a specific subset of documents based on given a search query and filters."}),"\n",(0,s.jsx)(n.p,{children:"Knowledge filters can be used with different OpenRAG functionality.\nFor example, knowledge filters can help agents access large knowledge bases efficiently by narrowing the scope of documents that you want the agent to use."}),"\n",(0,s.jsx)(n.h2,{id:"built-in-filters",children:"Built-in filters"}),"\n",(0,s.jsxs)(n.p,{children:["When you install OpenRAG, it automatically creates an ",(0,s.jsx)(n.strong,{children:"OpenRAG docs"})," filter that includes OpenRAG's default documents.\nThese documents provide information about OpenRAG itself and help you learn how to use OpenRAG."]}),"\n",(0,s.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,s.jsx)(i.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Chat"}),", ",(0,s.jsxs)(n.a,{href:"#apply-a-filter",children:["apply the ",(0,s.jsx)(n.strong,{children:"OpenRAG docs"})," filter"]})," if you want to ask questions about OpenRAG's features and functionality.\nThis limits the agent's context to the default OpenRAG documentation rather than all documents in your knowledge base."]}),"\n",(0,s.jsx)(n.p,{children:"After uploading your own documents, it is recommended that you create your own filters to organize your documents effectively and separate them from the default OpenRAG documents."}),"\n",(0,s.jsx)(n.h2,{id:"create-a-filter",children:"Create a filter"}),"\n",(0,s.jsx)(n.p,{children:"To create a knowledge filter, do the following:"}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Knowledge"}),", and then click ",(0,s.jsx)(i.A,{name:"Plus","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge Filters"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Enter a ",(0,s.jsx)(n.strong,{children:"Name"})," and ",(0,s.jsx)(n.strong,{children:"Description"}),", and then click ",(0,s.jsx)(n.strong,{children:"Create Filter"}),"."]}),"\n",(0,s.jsx)(n.p,{children:"By default, new filters match all documents in your knowledge base.\nModify the filter to customize it."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["To modify the filter, click ",(0,s.jsx)(i.A,{name:"Library","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge"}),", and then click your new filter. 
You can edit the following settings:"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Search Query"}),": Enter text for semantic search, such as ",(0,s.jsx)(n.code,{children:"financial reports from Q4"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Data Sources"}),": Select specific data sources or folders to include."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Document Types"}),": Filter by file type."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Owners"}),": Filter by the user that uploaded the documents."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Connectors"}),": Filter by ",(0,s.jsx)(n.a,{href:"/ingestion",children:"upload source"}),", such as the local file system or a Google Drive OAuth connector."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Response Limit"}),": Set the maximum number of results to return from the knowledge base. The default is ",(0,s.jsx)(n.code,{children:"10"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Score Threshold"}),": Set the minimum relevance score for similarity search. The default score is ",(0,s.jsx)(n.code,{children:"0"}),"."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["To save your changes, click ",(0,s.jsx)(n.strong,{children:"Update Filter"}),"."]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.h2,{id:"apply-a-filter",children:"Apply a filter"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Apply a global filter"}),": Click ",(0,s.jsx)(i.A,{name:"Library","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge"}),", and then enable the toggle next to your preferred filter. Only one filter can be the global filter. 
The global filter applies to all chat sessions."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Apply a chat filter"}),": In the ",(0,s.jsx)(i.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Chat"})," window, click ",(0,s.jsx)(i.A,{name:"Funnel","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Filter"}),", and then select the filter to apply.\nChat filters apply to one chat session only."]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.h2,{id:"delete-a-filter",children:"Delete a filter"}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(i.A,{name:"Library","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Click the filter that you want to delete."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Delete Filter"}),"."]}),"\n"]}),"\n"]})]})}function u(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(h,{...e})}):h(e)}}}]); \ No newline at end of file diff --git a/assets/js/71478a5d.7641035a.js b/assets/js/71478a5d.7641035a.js deleted file mode 100644 index 1e3ebf8e..00000000 --- a/assets/js/71478a5d.7641035a.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[5490],{6195:(e,n,l)=>{l.r(n),l.d(n,{assets:()=>c,contentTitle:()=>d,default:()=>g,frontMatter:()=>o,metadata:()=>r,toc:()=>a});const r=JSON.parse('{"id":"core-components/knowledge-filters","title":"Filter knowledge","description":"OpenRAG\'s knowledge filters help you organize and manage your knowledge base by creating pre-defined views of your documents.","source":"@site/docs/core-components/knowledge-filters.mdx","sourceDirName":"core-components","slug":"/knowledge-filters","permalink":"/knowledge-filters","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/knowledge-filters.mdx","tags":[],"version":"current","frontMatter":{"title":"Filter knowledge","slug":"/knowledge-filters"},"sidebar":"tutorialSidebar","previous":{"title":"Ingest knowledge","permalink":"/ingestion"},"next":{"title":"Chat","permalink":"/chat"}}');var s=l(4848),i=l(8453),t=l(7733);l(1470),l(9365);const o={title:"Filter knowledge",slug:"/knowledge-filters"},d=void 0,c={},a=[{value:"Create a filter",id:"create-a-filter",level:2},{value:"Apply a filter",id:"apply-a-filter",level:2},{value:"Delete a filter",id:"delete-a-filter",level:2}];function h(e){const n={a:"a",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",ul:"ul",...(0,i.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsxs)(n.p,{children:["OpenRAG's knowledge filters help you organize and manage your ",(0,s.jsx)(n.a,{href:"/knowledge",children:"knowledge base"})," by creating pre-defined views of your documents."]}),"\n",(0,s.jsx)(n.p,{children:"Each knowledge filter captures a specific subset of documents based on given a search query and filters."}),"\n",(0,s.jsx)(n.p,{children:"Knowledge filters can be used with different OpenRAG functionality.\nFor example, knowledge filters can help agents access large knowledge bases efficiently by narrowing the scope of documents that you want the agent to use."}),"\n",(0,s.jsx)(n.h2,{id:"create-a-filter",children:"Create a 
filter"}),"\n",(0,s.jsx)(n.p,{children:"To create a knowledge filter, do the following:"}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Knowledge"}),", and then click ",(0,s.jsx)(t.A,{name:"Plus","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge Filters"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Enter a ",(0,s.jsx)(n.strong,{children:"Name"})," and ",(0,s.jsx)(n.strong,{children:"Description"}),", and then click ",(0,s.jsx)(n.strong,{children:"Create Filter"}),"."]}),"\n",(0,s.jsx)(n.p,{children:"By default, new filters match all documents in your knowledge base.\nModify the filter to customize it."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["To modify the filter, click ",(0,s.jsx)(t.A,{name:"Library","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge"}),", and then click your new filter. You can edit the following settings:"]}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Search Query"}),": Enter text for semantic search, such as ",(0,s.jsx)(n.code,{children:"financial reports from Q4"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Data Sources"}),": Select specific data sources or folders to include."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Document Types"}),": Filter by file type."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Owners"}),": Filter by the user that uploaded the documents."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Connectors"}),": Filter by ",(0,s.jsx)(n.a,{href:"/ingestion",children:"upload source"}),", such as the local file system or a Google Drive OAuth connector."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Response Limit"}),": Set the maximum number of results to return from the knowledge base. The default is ",(0,s.jsx)(n.code,{children:"10"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Score Threshold"}),": Set the minimum relevance score for similarity search. The default score is ",(0,s.jsx)(n.code,{children:"0"}),"."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["To save your changes, click ",(0,s.jsx)(n.strong,{children:"Update Filter"}),"."]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.h2,{id:"apply-a-filter",children:"Apply a filter"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Apply a global filter"}),": Click ",(0,s.jsx)(t.A,{name:"Library","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge"}),", and then enable the toggle next to your preferred filter. Only one filter can be the global filter. 
The global filter applies to all chat sessions."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Apply a chat filter"}),": In the ",(0,s.jsx)(t.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Chat"})," window, click ",(0,s.jsx)(t.A,{name:"Funnel","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Filter"}),", and then select the filter to apply.\nChat filters apply to one chat session only."]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.h2,{id:"delete-a-filter",children:"Delete a filter"}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(t.A,{name:"Library","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Knowledge"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Click the filter that you want to delete."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Delete Filter"}),"."]}),"\n"]}),"\n"]})]})}function g(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(h,{...e})}):h(e)}}}]); \ No newline at end of file diff --git a/assets/js/749371cc.7bf165c7.js b/assets/js/749371cc.7bf165c7.js new file mode 100644 index 00000000..8f5a3966 --- /dev/null +++ b/assets/js/749371cc.7bf165c7.js @@ -0,0 +1 @@ +"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[2272],{3656:(e,n,s)=>{s.d(n,{Ay:()=>p,RM:()=>d});var o=s(4848),r=s(8453),l=s(7733),i=s(1470),t=s(9365);function a(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",strong:"strong",...(0,r.R)(),...e.components};return(0,o.jsxs)(o.Fragment,{children:[(0,o.jsx)(n.p,{children:"Using Ollama for your OpenRAG language model provider offers greater flexibility and configuration, but can also be overwhelming to start.\nThese recommendations are a reasonable starting point for users with at least one GPU and experience running LLMs locally."}),"\n",(0,o.jsxs)(n.p,{children:["For best performance, OpenRAG recommends OpenAI's ",(0,o.jsx)(n.code,{children:"gpt-oss:20b"})," language model. 
However, this model uses 16GB of RAM, so consider using Ollama Cloud or running Ollama on a remote machine."]}),"\n",(0,o.jsxs)(n.p,{children:["For generating embeddings, OpenRAG recommends the ",(0,o.jsx)(n.a,{href:"https://ollama.com/library/nomic-embed-text",children:(0,o.jsx)(n.code,{children:"nomic-embed-text"})})," embedding model, which provides high-quality embeddings optimized for retrieval tasks."]}),"\n",(0,o.jsxs)(n.p,{children:["To run models in ",(0,o.jsx)(n.a,{href:"https://docs.ollama.com/cloud",children:(0,o.jsx)(n.strong,{children:"Ollama Cloud"})}),", follow these steps:"]}),"\n",(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Sign in to Ollama Cloud.\nIn a terminal, enter ",(0,o.jsx)(n.code,{children:"ollama signin"})," to connect your local environment with Ollama Cloud."]}),"\n",(0,o.jsxs)(n.li,{children:["To run the model, in Ollama, select the ",(0,o.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model, or run ",(0,o.jsx)(n.code,{children:"ollama run gpt-oss:20b-cloud"})," in a terminal.\nOllama Cloud models are run at the same URL as your local Ollama server at ",(0,o.jsx)(n.code,{children:"http://localhost:11434"}),", and automatically offloaded to Ollama's cloud service."]}),"\n",(0,o.jsxs)(n.li,{children:["Connect OpenRAG to the same local Ollama server as you would for local models in onboarding, using the default address of ",(0,o.jsx)(n.code,{children:"http://localhost:11434"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the ",(0,o.jsx)(n.strong,{children:"Language model"})," field, select the ",(0,o.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model."]}),"\n"]}),"\n",(0,o.jsx)("br",{}),"\n",(0,o.jsxs)(n.p,{children:["To run models on a ",(0,o.jsx)(n.strong,{children:"remote Ollama server"}),", follow these steps:"]}),"\n",(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsx)(n.li,{children:"Ensure your remote Ollama server is accessible from your OpenRAG instance."}),"\n",(0,o.jsxs)(n.li,{children:["In the ",(0,o.jsx)(n.strong,{children:"Ollama Base URL"})," field, enter your remote Ollama server's base URL, such as ",(0,o.jsx)(n.code,{children:"http://your-remote-server:11434"}),".\nOpenRAG connects to the remote Ollama server and populates the lists with the server's available models."]}),"\n",(0,o.jsxs)(n.li,{children:["Select your ",(0,o.jsx)(n.strong,{children:"Embedding model"})," and ",(0,o.jsx)(n.strong,{children:"Language model"})," from the available options."]}),"\n"]})]})}function c(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(a,{...e})}):a(e)}const d=[{value:"Application onboarding",id:"application-onboarding",level:2}];function h(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,r.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,o.jsxs)(o.Fragment,{children:[(0,o.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,o.jsx)(n.p,{children:"The first time you start OpenRAG, regardless of how you installed it, you must complete application onboarding."}),"\n",(0,o.jsxs)(n.p,{children:["Some of these variables, such as the embedding models, can be changed seamlessly after onboarding.\nOthers are immutable and require you to destroy and recreate the OpenRAG containers.\nFor more information, see 
",(0,o.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n",(0,o.jsx)(n.p,{children:"You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embeddings model.\nAdditionally, you can set multiple embedding models."}),"\n",(0,o.jsx)(n.p,{children:"You only need to complete onboarding for your preferred providers."}),"\n",(0,o.jsxs)(i.A,{groupId:"Provider",children:[(0,o.jsxs)(t.A,{value:"Anthropic",label:"Anthropic",default:!0,children:[(0,o.jsx)(n.admonition,{type:"info",children:(0,o.jsx)(n.p,{children:"Anthropic doesn't provide embedding models. If you select Anthropic for your language model, you must select a different provider for embeddings."})}),(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Enable ",(0,o.jsx)(n.strong,{children:"Use environment Anthropic API key"})," to automatically use your key from the ",(0,o.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an Anthropic API key into the field."]}),"\n",(0,o.jsxs)(n.li,{children:["Under ",(0,o.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,o.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,o.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,o.jsx)(l.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]}),(0,o.jsx)(t.A,{value:"OpenAI",label:"OpenAI",children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Enable ",(0,o.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,o.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,o.jsxs)(n.li,{children:["Under ",(0,o.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,o.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,o.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,o.jsx)(l.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,o.jsx)(t.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Complete the fields for ",(0,o.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", ",(0,o.jsx)(n.strong,{children:"IBM Project ID"}),", and ",(0,o.jsx)(n.strong,{children:"IBM API key"}),".\nThese values are found in your IBM 
watsonx deployment."]}),"\n",(0,o.jsxs)(n.li,{children:["Under ",(0,o.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,o.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,o.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,o.jsx)(l.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,o.jsxs)(t.A,{value:"Ollama",label:"Ollama",children:[(0,o.jsx)(n.admonition,{type:"info",children:(0,o.jsxs)(n.p,{children:["Ollama isn't installed with OpenRAG. To install Ollama, see the ",(0,o.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["To connect to an Ollama server running on your local machine, enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,o.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG connects to the Ollama server and populates the model lists with the server's available models."]}),"\n",(0,o.jsxs)(n.li,{children:["Select the ",(0,o.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,o.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.","\n",(0,o.jsxs)(s,{closed:!0,children:[(0,o.jsx)("summary",{children:"Ollama model selection and external server configuration"}),(0,o.jsx)(c,{})]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function p(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(h,{...e})}):h(e)}},3862:(e,n,s)=>{s.d(n,{Ay:()=>t,RM:()=>l});var o=s(4848),r=s(8453);const l=[];function i(e){const n={a:"a",admonition:"admonition",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",...(0,r.R)(),...e.components};return(0,o.jsxs)(o.Fragment,{children:[(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install",children:"Install WSL"})," with the Ubuntu distribution using WSL 2:"]}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-powershell",children:"wsl --install -d Ubuntu\n"})}),"\n",(0,o.jsxs)(n.p,{children:["For new installations, the ",(0,o.jsx)(n.code,{children:"wsl --install"})," command uses WSL 2 and Ubuntu by default."]}),"\n",(0,o.jsxs)(n.p,{children:["For existing WSL installations, you can ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#change-the-default-linux-distribution-installed",children:"change the distribution"})," and 
",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#upgrade-version-from-wsl-1-to-wsl-2",children:"check the WSL version"}),"."]}),"\n",(0,o.jsx)(n.admonition,{title:"Known limitation",type:"warning",children:(0,o.jsx)(n.p,{children:"OpenRAG isn't compatible with nested virtualization, which can cause networking issues.\nDon't install OpenRAG on a WSL distribution that is installed inside a Windows VM.\nInstead, install OpenRAG on your base OS or a non-nested Linux VM."})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#ways-to-run-multiple-linux-distributions-with-wsl",children:"Start your WSL Ubuntu distribution"})," if it doesn't start automatically."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/setup/environment#set-up-your-linux-username-and-password",children:"Set up a username and password for your WSL distribution"}),"."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-containers",children:"Install Docker Desktop for Windows with WSL 2"}),". When you reach the Docker Desktop ",(0,o.jsx)(n.strong,{children:"WSL integration"})," settings, make sure your Ubuntu distribution is enabled, and then click ",(0,o.jsx)(n.strong,{children:"Apply & Restart"})," to enable Docker support in WSL."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Install and run OpenRAG from within your WSL Ubuntu distribution."}),"\n"]}),"\n"]}),"\n",(0,o.jsx)("br",{}),"\n",(0,o.jsxs)(n.p,{children:["If you encounter issues with port forwarding or the Windows Firewall, you might need to adjust the ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/security/operating-system-security/network-security/windows-firewall/hyper-v-firewall",children:"Hyper-V firewall settings"})," to allow communication between your WSL distribution and the Windows host. For more troubleshooting advice for networking issues, see ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/troubleshooting#common-issues",children:"Troubleshooting WSL common issues"}),"."]})]})}function t(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(i,{...e})}):i(e)}},5788:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>p,contentTitle:()=>h,default:()=>j,frontMatter:()=>d,metadata:()=>o,toc:()=>x});const o=JSON.parse('{"id":"get-started/docker","title":"Install OpenRAG containers","description":"OpenRAG has two Docker Compose files. 
Both files deploy the same applications and containers locally, but they are for different environments:","source":"@site/docs/get-started/docker.mdx","sourceDirName":"get-started","slug":"/docker","permalink":"/docker","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/docker.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG containers","slug":"/docker"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG with TUI","permalink":"/install"},"next":{"title":"Flows","permalink":"/agents"}}');var r=s(4848),l=s(8453),i=s(1470),t=s(9365),a=s(3656),c=s(3862);const d={title:"Install OpenRAG containers",slug:"/docker"},h=void 0,p={},x=[{value:"Prerequisites",id:"prerequisites",level:2},...c.RM,{value:"Install OpenRAG with Docker Compose",id:"install-openrag-with-docker-compose",level:2},...a.RM,{value:"Container management commands",id:"container-management-commands",level:2},{value:"Upgrade containers",id:"upgrade-containers",level:3},{value:"Reset containers (destructive)",id:"reset-containers",level:3}];function m(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",table:"table",tbody:"tbody",td:"td",th:"th",thead:"thead",tr:"tr",ul:"ul",...(0,l.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml",children:(0,r.jsx)(n.code,{children:"docker-compose.yml"})})," is an OpenRAG deployment with GPU support for accelerated AI processing. This Docker Compose file requires an NVIDIA GPU with ",(0,r.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml",children:(0,r.jsx)(n.code,{children:"docker-compose-cpu.yml"})})," is a CPU-only version of OpenRAG for systems without NVIDIA GPU support. 
Use this Docker Compose file for environments where GPU drivers aren't available."]}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Install the following:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python"})," version 3.13 or later."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,r.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:(0,r.jsx)(n.code,{children:"podman-compose"})})," or ",(0,r.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Microsoft Windows only: To run OpenRAG on Windows, you must use the Windows Subsystem for Linux (WSL)."}),"\n",(0,r.jsxs)(s,{children:[(0,r.jsx)("summary",{children:"Install WSL for OpenRAG"}),(0,r.jsx)(c.Ay,{})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Prepare model providers and credentials."}),"\n",(0,r.jsxs)(n.p,{children:["During ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),", you must select language model and embedding model providers.\nIf your chosen provider offers both types, you can use the same provider for both selections.\nIf your provider offers only one type, such as Anthropic, you must select two providers."]}),"\n",(0,r.jsx)(n.p,{children:"Gather the credentials and connection details for your chosen model providers before starting onboarding:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["OpenAI: Create an ",(0,r.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Anthropic language models: Create an ",(0,r.jsx)(n.a,{href:"https://www.anthropic.com/docs/api/reference",children:"Anthropic API key"}),"."]}),"\n",(0,r.jsx)(n.li,{children:"IBM watsonx.ai: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment."}),"\n",(0,r.jsxs)(n.li,{children:["Ollama: Use the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"})," to set up your Ollama instance locally, in the cloud, or on a remote server, and then get your Ollama server's base URL."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,r.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. This is required to use the GPU-accelerated Docker Compose file. 
If you choose not to use GPU support, you must use the CPU-only Docker Compose file instead."]}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"install-openrag-with-docker-compose",children:"Install OpenRAG with Docker Compose"}),"\n",(0,r.jsx)(n.p,{children:"To install OpenRAG with Docker Compose, do the following:"}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Clone the OpenRAG repository."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"git clone https://github.com/langflow-ai/openrag.git\ncd openrag\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Install dependencies."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Copy the example ",(0,r.jsx)(n.code,{children:".env"})," file included in the repository root.\nThe example file includes all environment variables with comments to guide you in finding and setting their values."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"cp .env.example .env\n"})}),"\n",(0,r.jsxs)(n.p,{children:["Alternatively, create a new ",(0,r.jsx)(n.code,{children:".env"})," file in the repository root."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{children:"touch .env\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["The Docker Compose files are populated with the values from your ",(0,r.jsx)(n.code,{children:".env"})," file.\nThe ",(0,r.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," value must be set.\n",(0,r.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," can be automatically generated when using the TUI, but for a Docker Compose installation, you can set it manually instead. To generate an OpenSearch admin password, see the ",(0,r.jsx)(n.a,{href:"https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password",children:"OpenSearch documentation"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"The following values are optional:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"OPENAI_API_KEY=your_openai_api_key\nLANGFLOW_SECRET_KEY=your_secret_key\n"})}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.code,{children:"OPENAI_API_KEY"})," is optional. You can provide it during ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"})," or choose a different model provider. If you want to set it in your ",(0,r.jsx)(n.code,{children:".env"})," file, you can find your OpenAI API key in your ",(0,r.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI account"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.code,{children:"LANGFLOW_SECRET_KEY"})," is optional. Langflow will auto-generate it if not set. For more information, see the ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key",children:"Langflow documentation"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"The following Langflow configuration values are optional but important to consider:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"LANGFLOW_SUPERUSER=admin\nLANGFLOW_SUPERUSER_PASSWORD=your_langflow_password\n"})}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})," defaults to ",(0,r.jsx)(n.code,{children:"admin"}),". You can omit it or set it to a different username. 
",(0,r.jsx)(n.code,{children:"LANGFLOW_SUPERUSER_PASSWORD"})," is optional. If omitted, Langflow runs in ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login",children:"autologin mode"})," with no password required. If set, Langflow requires password authentication."]}),"\n",(0,r.jsxs)(n.p,{children:["For more information on configuring OpenRAG with environment variables, see ",(0,r.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Start ",(0,r.jsx)(n.code,{children:"docling serve"})," on the host machine.\nOpenRAG Docker installations require that ",(0,r.jsx)(n.code,{children:"docling serve"})," is running on port 5001 on the host machine.\nThis enables ",(0,r.jsx)(n.a,{href:"https://opensource.apple.com/projects/mlx/",children:"Mac MLX"})," support for document processing."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py start --port 5001\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Confirm ",(0,r.jsx)(n.code,{children:"docling serve"})," is running."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{children:"uv run python scripts/docling_ctl.py status\n"})}),"\n",(0,r.jsxs)(n.p,{children:["Make sure the response shows that ",(0,r.jsx)(n.code,{children:"docling serve"})," is running, for example:"]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"Status: running\nEndpoint: http://127.0.0.1:5001\nDocs: http://127.0.0.1:5001/docs\nPID: 27746\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Deploy OpenRAG locally with Docker Compose based on your deployment type."}),"\n",(0,r.jsxs)(i.A,{groupId:"Compose file",children:[(0,r.jsx)(t.A,{value:"docker-compose.yml",label:"docker-compose.yml",default:!0,children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose build\ndocker compose up -d\n"})})}),(0,r.jsx)(t.A,{value:"docker-compose-cpu.yml",label:"docker-compose-cpu.yml",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose -f docker-compose-cpu.yml up -d\n"})})})]}),"\n",(0,r.jsx)(n.p,{children:"The OpenRAG Docker Compose file starts five containers:"}),"\n",(0,r.jsxs)(n.table,{children:[(0,r.jsx)(n.thead,{children:(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.th,{children:"Container Name"}),(0,r.jsx)(n.th,{children:"Default Address"}),(0,r.jsx)(n.th,{children:"Purpose"})]})}),(0,r.jsxs)(n.tbody,{children:[(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenRAG Backend"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})}),(0,r.jsx)(n.td,{children:"FastAPI server and core functionality."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenRAG Frontend"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})}),(0,r.jsx)(n.td,{children:"React web interface for users."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"Langflow"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})}),(0,r.jsx)(n.td,{children:"AI workflow engine and flow 
management."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenSearch"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:9200",children:"http://localhost:9200"})}),(0,r.jsx)(n.td,{children:"Vector database for document storage."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenSearch Dashboards"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:5601",children:"http://localhost:5601"})}),(0,r.jsx)(n.td,{children:"Database administration interface."})]})]})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Verify installation by confirming all services are running."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose ps\n"})}),"\n",(0,r.jsx)(n.p,{children:"You can now access OpenRAG at the following endpoints:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.strong,{children:"Frontend"}),": ",(0,r.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.strong,{children:"Backend API"}),": ",(0,r.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.strong,{children:"Langflow"}),": ",(0,r.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Continue with ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.p,{children:["To stop ",(0,r.jsx)(n.code,{children:"docling serve"})," when you're done with your OpenRAG deployment, run:"]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py stop\n"})}),"\n",(0,r.jsx)(a.Ay,{}),"\n",(0,r.jsx)(n.h2,{id:"container-management-commands",children:"Container management commands"}),"\n",(0,r.jsxs)(n.p,{children:["Manage your OpenRAG containers with the following commands.\nThese commands are also available in the TUI's ",(0,r.jsx)(n.a,{href:"/install#status",children:"Status menu"}),"."]}),"\n",(0,r.jsx)(n.h3,{id:"upgrade-containers",children:"Upgrade containers"}),"\n",(0,r.jsx)(n.p,{children:"Upgrade your containers to the latest version while preserving your data."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose pull\ndocker compose up -d --force-recreate\n"})}),"\n",(0,r.jsx)(n.h3,{id:"reset-containers",children:"Reset containers (destructive)"}),"\n",(0,r.jsx)(n.admonition,{type:"warning",children:(0,r.jsx)(n.p,{children:"These are destructive operations that reset your OpenRAG deployment to an initial state.\nBe aware that data is lost and cannot be recovered after running these commands."})}),"\n",(0,r.jsxs)(i.A,{children:[(0,r.jsx)(t.A,{value:"docker-compose",label:"Docker Compose",default:!0,children:(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Rebuild containers: This command destroys and recreates the containers. 
Data stored exclusively on the containers is lost, such as Langflow flows.\nThe ",(0,r.jsx)(n.code,{children:".env"})," file, ",(0,r.jsx)(n.code,{children:"config"})," directory, ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory, ",(0,r.jsx)(n.code,{children:"./opensearch-data"})," directory, and the ",(0,r.jsx)(n.code,{children:"conversations.json"})," file are preserved."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose up --build --force-recreate --remove-orphans\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Destroy and recreate containers with the option for additional data removal: These commands destroy the containers, and then recreate them.\nThis allows you to delete other OpenRAG data before recreating the containers."}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Destroy the containers, volumes, and local images, and then remove (prune) any additional Docker objects:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose down --volumes --remove-orphans --rmi local\ndocker system prune -f\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Optional: Remove data that wasn't deleted by the previous commands:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["OpenRAG's ",(0,r.jsx)(n.code,{children:".env"})," file"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of OpenRAG's ",(0,r.jsx)(n.code,{children:"config"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./opensearch-data"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The ",(0,r.jsx)(n.code,{children:"conversations.json"})," file"]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Recreate the containers:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose up -d\n"})}),"\n"]}),"\n"]}),"\n"]}),"\n"]})}),(0,r.jsx)(t.A,{value:"Podman-compose",label:"Podman Compose",children:(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Rebuild containers: This command destroys and recreates the containers. 
Data stored exclusively on the containers is lost, such as Langflow flows.\nThe ",(0,r.jsx)(n.code,{children:".env"})," file, ",(0,r.jsx)(n.code,{children:"config"})," directory, ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory, ",(0,r.jsx)(n.code,{children:"./opensearch-data"})," directory, and the ",(0,r.jsx)(n.code,{children:"conversations.json"})," file are preserved."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman-compose up --build --force-recreate --remove-orphans\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Destroy and recreate containers with the option for additional data removal: These commands destroy the containers, and then recreate them.\nThis allows you to delete other OpenRAG data before recreating the containers."}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Destroy the containers, volumes, and local images, and then remove (prune) any additional Podman objects:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman-compose down --volumes --remove-orphans --rmi local\npodman system prune -f\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Optional: Remove data that wasn't deleted by the previous commands:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["OpenRAG's ",(0,r.jsx)(n.code,{children:".env"})," file"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of OpenRAG's ",(0,r.jsx)(n.code,{children:"config"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./opensearch-data"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The ",(0,r.jsx)(n.code,{children:"conversations.json"})," file"]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Recreate the containers:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman-compose up -d\n"})}),"\n"]}),"\n"]}),"\n"]}),"\n"]})}),(0,r.jsx)(t.A,{value:"docker",label:"Docker",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Stop all running containers:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker stop $(docker ps -q)\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all containers, including stopped containers:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker rm --force $(docker ps -aq)\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all images:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker rmi --force $(docker images -q)\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all volumes:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker volume prune --force\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all networks except the default network:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker network prune --force\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Clean up any leftover 
data:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker system prune --all --force --volumes\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Optional: Remove data that wasn't deleted by the previous commands:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["OpenRAG's ",(0,r.jsx)(n.code,{children:".env"})," file"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of OpenRAG's ",(0,r.jsx)(n.code,{children:"config"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./opensearch-data"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The ",(0,r.jsx)(n.code,{children:"conversations.json"})," file"]}),"\n"]}),"\n"]}),"\n"]})}),(0,r.jsx)(t.A,{value:"podman",label:"Podman",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Stop all running containers:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman stop --all\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all containers, including stopped containers:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman rm --all --force\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all images:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman rmi --all --force\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all volumes:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman volume prune --force\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Remove all networks except the default network:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman network prune --force\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Clean up any leftover data:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"podman system prune --all --force --volumes\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Optional: Remove data that wasn't deleted by the previous commands:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["OpenRAG's ",(0,r.jsx)(n.code,{children:".env"})," file"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of OpenRAG's ",(0,r.jsx)(n.code,{children:"config"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The contents of the ",(0,r.jsx)(n.code,{children:"./opensearch-data"})," directory"]}),"\n",(0,r.jsxs)(n.li,{children:["The ",(0,r.jsx)(n.code,{children:"conversations.json"})," file"]}),"\n"]}),"\n"]}),"\n"]})})]}),"\n",(0,r.jsxs)(n.p,{children:["After resetting your containers, you must repeat ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]})]})}function j(e={}){const{wrapper:n}={...(0,l.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(m,{...e})}):m(e)}}}]); \ No newline at end of file diff --git a/assets/js/749371cc.ed259beb.js b/assets/js/749371cc.ed259beb.js deleted file mode 100644 index a3985d84..00000000 --- 
a/assets/js/749371cc.ed259beb.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[2272],{3656:(e,n,s)=>{s.d(n,{Ay:()=>p,RM:()=>c});var o=s(4848),r=s(8453),i=s(7733),l=s(1470),t=s(9365);function a(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",strong:"strong",...(0,r.R)(),...e.components};return(0,o.jsxs)(o.Fragment,{children:[(0,o.jsx)(n.p,{children:"Using Ollama for your OpenRAG language model provider offers greater flexibility and configuration, but can also be overwhelming to start.\nThese recommendations are a reasonable starting point for users with at least one GPU and experience running LLMs locally."}),"\n",(0,o.jsxs)(n.p,{children:["For best performance, OpenRAG recommends OpenAI's ",(0,o.jsx)(n.code,{children:"gpt-oss:20b"})," language model. However, this model uses 16GB of RAM, so consider using Ollama Cloud or running Ollama on a remote machine."]}),"\n",(0,o.jsxs)(n.p,{children:["For generating embeddings, OpenRAG recommends the ",(0,o.jsx)(n.a,{href:"https://ollama.com/library/nomic-embed-text",children:(0,o.jsx)(n.code,{children:"nomic-embed-text"})})," embedding model, which provides high-quality embeddings optimized for retrieval tasks."]}),"\n",(0,o.jsxs)(n.p,{children:["To run models in ",(0,o.jsx)(n.a,{href:"https://docs.ollama.com/cloud",children:(0,o.jsx)(n.strong,{children:"Ollama Cloud"})}),", follow these steps:"]}),"\n",(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Sign in to Ollama Cloud.\nIn a terminal, enter ",(0,o.jsx)(n.code,{children:"ollama signin"})," to connect your local environment with Ollama Cloud."]}),"\n",(0,o.jsxs)(n.li,{children:["To run the model, in Ollama, select the ",(0,o.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model, or run ",(0,o.jsx)(n.code,{children:"ollama run gpt-oss:20b-cloud"})," in a terminal.\nOllama Cloud models are run at the same URL as your local Ollama server at ",(0,o.jsx)(n.code,{children:"http://localhost:11434"}),", and automatically offloaded to Ollama's cloud service."]}),"\n",(0,o.jsxs)(n.li,{children:["Connect OpenRAG to the same local Ollama server as you would for local models in onboarding, using the default address of ",(0,o.jsx)(n.code,{children:"http://localhost:11434"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the ",(0,o.jsx)(n.strong,{children:"Language model"})," field, select the ",(0,o.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model."]}),"\n"]}),"\n",(0,o.jsx)("br",{}),"\n",(0,o.jsxs)(n.p,{children:["To run models on a ",(0,o.jsx)(n.strong,{children:"remote Ollama server"}),", follow these steps:"]}),"\n",(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsx)(n.li,{children:"Ensure your remote Ollama server is accessible from your OpenRAG instance."}),"\n",(0,o.jsxs)(n.li,{children:["In the ",(0,o.jsx)(n.strong,{children:"Ollama Base URL"})," field, enter your remote Ollama server's base URL, such as ",(0,o.jsx)(n.code,{children:"http://your-remote-server:11434"}),".\nOpenRAG connects to the remote Ollama server and populates the lists with the server's available models."]}),"\n",(0,o.jsxs)(n.li,{children:["Select your ",(0,o.jsx)(n.strong,{children:"Embedding model"})," and ",(0,o.jsx)(n.strong,{children:"Language model"})," from the available options."]}),"\n"]})]})}function d(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(a,{...e})}):a(e)}const c=[{value:"Application onboarding",id:"application-onboarding",level:2}];function h(e){const 
n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,r.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,o.jsxs)(o.Fragment,{children:[(0,o.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,o.jsx)(n.p,{children:"The first time you start OpenRAG, regardless of how you installed it, you must complete application onboarding."}),"\n",(0,o.jsxs)(n.p,{children:["Some of these variables, such as the embedding models, can be changed seamlessly after onboarding.\nOthers are immutable and require you to destroy and recreate the OpenRAG containers.\nFor more information, see ",(0,o.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n",(0,o.jsx)(n.p,{children:"You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embeddings model.\nAdditionally, you can set multiple embedding models."}),"\n",(0,o.jsx)(n.p,{children:"You only need to complete onboarding for your preferred providers."}),"\n",(0,o.jsxs)(l.A,{groupId:"Provider",children:[(0,o.jsxs)(t.A,{value:"Anthropic",label:"Anthropic",default:!0,children:[(0,o.jsx)(n.admonition,{type:"info",children:(0,o.jsx)(n.p,{children:"Anthropic doesn't provide embedding models. If you select Anthropic for your language model, you must select a different provider for embeddings."})}),(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Enable ",(0,o.jsx)(n.strong,{children:"Use environment Anthropic API key"})," to automatically use your key from the ",(0,o.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an Anthropic API key into the field."]}),"\n",(0,o.jsxs)(n.li,{children:["Under ",(0,o.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,o.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,o.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,o.jsx)(i.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]}),(0,o.jsx)(t.A,{value:"OpenAI",label:"OpenAI",children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Enable ",(0,o.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,o.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,o.jsxs)(n.li,{children:["Under ",(0,o.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,o.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,o.jsx)(n.strong,{children:"Embedding 
Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,o.jsx)(i.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,o.jsx)(t.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["Complete the fields for ",(0,o.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", ",(0,o.jsx)(n.strong,{children:"IBM Project ID"}),", and ",(0,o.jsx)(n.strong,{children:"IBM API key"}),".\nThese values are found in your IBM watsonx deployment."]}),"\n",(0,o.jsxs)(n.li,{children:["Under ",(0,o.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,o.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,o.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,o.jsx)(i.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,o.jsxs)(t.A,{value:"Ollama",label:"Ollama",children:[(0,o.jsx)(n.admonition,{type:"info",children:(0,o.jsxs)(n.p,{children:["Ollama isn't installed with OpenRAG. 
To install Ollama, see the ",(0,o.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["To connect to an Ollama server running on your local machine, enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,o.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG connects to the Ollama server and populates the model lists with the server's available models."]}),"\n",(0,o.jsxs)(n.li,{children:["Select the ",(0,o.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,o.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.","\n",(0,o.jsxs)(s,{closed:!0,children:[(0,o.jsx)("summary",{children:"Ollama model selection and external server configuration"}),(0,o.jsx)(d,{})]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["Click ",(0,o.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,o.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,o.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,o.jsxs)(n.li,{children:["Continue with the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function p(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(h,{...e})}):h(e)}},3862:(e,n,s)=>{s.d(n,{Ay:()=>t,RM:()=>i});var o=s(4848),r=s(8453);const i=[];function l(e){const n={a:"a",admonition:"admonition",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",...(0,r.R)(),...e.components};return(0,o.jsxs)(o.Fragment,{children:[(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install",children:"Install WSL"})," with the Ubuntu distribution using WSL 2:"]}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-powershell",children:"wsl --install -d Ubuntu\n"})}),"\n",(0,o.jsxs)(n.p,{children:["For new installations, the ",(0,o.jsx)(n.code,{children:"wsl --install"})," command uses WSL 2 and Ubuntu by default."]}),"\n",(0,o.jsxs)(n.p,{children:["For existing WSL installations, you can ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#change-the-default-linux-distribution-installed",children:"change the distribution"})," and ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#upgrade-version-from-wsl-1-to-wsl-2",children:"check the WSL version"}),"."]}),"\n",(0,o.jsx)(n.admonition,{title:"Known limitation",type:"warning",children:(0,o.jsx)(n.p,{children:"OpenRAG isn't compatible with nested virtualization, which can cause networking issues.\nDon't install OpenRAG on a WSL distribution that is installed inside a Windows VM.\nInstead, install OpenRAG on your base OS or a non-nested Linux VM."})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#ways-to-run-multiple-linux-distributions-with-wsl",children:"Start your WSL Ubuntu distribution"})," if it doesn't start automatically."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/setup/environment#set-up-your-linux-username-and-password",children:"Set up a username and password for your WSL 
distribution"}),"."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-containers",children:"Install Docker Desktop for Windows with WSL 2"}),". When you reach the Docker Desktop ",(0,o.jsx)(n.strong,{children:"WSL integration"})," settings, make sure your Ubuntu distribution is enabled, and then click ",(0,o.jsx)(n.strong,{children:"Apply & Restart"})," to enable Docker support in WSL."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Install and run OpenRAG from within your WSL Ubuntu distribution."}),"\n"]}),"\n"]}),"\n",(0,o.jsx)("br",{}),"\n",(0,o.jsxs)(n.p,{children:["If you encounter issues with port forwarding or the Windows Firewall, you might need to adjust the ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/security/operating-system-security/network-security/windows-firewall/hyper-v-firewall",children:"Hyper-V firewall settings"})," to allow communication between your WSL distribution and the Windows host. For more troubleshooting advice for networking issues, see ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/troubleshooting#common-issues",children:"Troubleshooting WSL common issues"}),"."]})]})}function t(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(l,{...e})}):l(e)}},5788:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>p,contentTitle:()=>h,default:()=>x,frontMatter:()=>c,metadata:()=>o,toc:()=>u});const o=JSON.parse('{"id":"get-started/docker","title":"Install OpenRAG containers","description":"OpenRAG has two Docker Compose files. Both files deploy the same applications and containers locally, but they are for different environments:","source":"@site/docs/get-started/docker.mdx","sourceDirName":"get-started","slug":"/docker","permalink":"/docker","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/docker.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG containers","slug":"/docker"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG with TUI","permalink":"/install"},"next":{"title":"Flows","permalink":"/agents"}}');var r=s(4848),i=s(8453),l=s(1470),t=s(9365),a=s(3656),d=s(3862);const c={title:"Install OpenRAG containers",slug:"/docker"},h=void 0,p={},u=[{value:"Prerequisites",id:"prerequisites",level:2},...d.RM,{value:"Install OpenRAG with Docker Compose",id:"install-openrag-with-docker-compose",level:2},...a.RM,{value:"Container management commands",id:"container-management-commands",level:2},{value:"Upgrade containers",id:"upgrade-containers",level:3},{value:"Rebuild containers (destructive)",id:"rebuild-containers-destructive",level:3},{value:"Remove all containers and data (destructive)",id:"remove-all-containers-and-data-destructive",level:3}];function m(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",table:"table",tbody:"tbody",td:"td",th:"th",thead:"thead",tr:"tr",ul:"ul",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"OpenRAG has two Docker Compose files. 
Both files deploy the same applications and containers locally, but they are for different environments:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose.yml",children:(0,r.jsx)(n.code,{children:"docker-compose.yml"})})," is an OpenRAG deployment with GPU support for accelerated AI processing. This Docker Compose file requires an NVIDIA GPU with ",(0,r.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml",children:(0,r.jsx)(n.code,{children:"docker-compose-cpu.yml"})})," is a CPU-only version of OpenRAG for systems without NVIDIA GPU support. Use this Docker Compose file for environments where GPU drivers aren't available."]}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Install the following:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python"})," version 3.13 or later."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,r.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:(0,r.jsx)(n.code,{children:"podman-compose"})})," or ",(0,r.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". 
To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Microsoft Windows only: To run OpenRAG on Windows, you must use the Windows Subsystem for Linux (WSL)."}),"\n",(0,r.jsxs)(s,{children:[(0,r.jsx)("summary",{children:"Install WSL for OpenRAG"}),(0,r.jsx)(d.Ay,{})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Prepare model providers and credentials."}),"\n",(0,r.jsxs)(n.p,{children:["During ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),", you must select language model and embedding model providers.\nIf your chosen provider offers both types, you can use the same provider for both selections.\nIf your provider offers only one type, such as Anthropic, you must select two providers."]}),"\n",(0,r.jsx)(n.p,{children:"Gather the credentials and connection details for your chosen model providers before starting onboarding:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:["OpenAI: Create an ",(0,r.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Anthropic language models: Create an ",(0,r.jsx)(n.a,{href:"https://www.anthropic.com/docs/api/reference",children:"Anthropic API key"}),"."]}),"\n",(0,r.jsx)(n.li,{children:"IBM watsonx.ai: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment."}),"\n",(0,r.jsxs)(n.li,{children:["Ollama: Use the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"})," to set up your Ollama instance locally, in the cloud, or on a remote server, and then get your Ollama server's base URL."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,r.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. This is required to use the GPU-accelerated Docker Compose file. 
If you choose not to use GPU support, you must use the CPU-only Docker Compose file instead."]}),"\n"]}),"\n"]}),"\n",(0,r.jsx)(n.h2,{id:"install-openrag-with-docker-compose",children:"Install OpenRAG with Docker Compose"}),"\n",(0,r.jsx)(n.p,{children:"To install OpenRAG with Docker Compose, do the following:"}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Clone the OpenRAG repository."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"git clone https://github.com/langflow-ai/openrag.git\ncd openrag\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Install dependencies."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Copy the example ",(0,r.jsx)(n.code,{children:".env"})," file included in the repository root.\nThe example file includes all environment variables with comments to guide you in finding and setting their values."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"cp .env.example .env\n"})}),"\n",(0,r.jsxs)(n.p,{children:["Alternatively, create a new ",(0,r.jsx)(n.code,{children:".env"})," file in the repository root."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{children:"touch .env\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["The Docker Compose files are populated with the values from your ",(0,r.jsx)(n.code,{children:".env"})," file.\nThe ",(0,r.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," value must be set.\n",(0,r.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," can be automatically generated when using the TUI, but for a Docker Compose installation, you can set it manually instead. To generate an OpenSearch admin password, see the ",(0,r.jsx)(n.a,{href:"https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password",children:"OpenSearch documentation"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"The following values are optional:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"OPENAI_API_KEY=your_openai_api_key\nLANGFLOW_SECRET_KEY=your_secret_key\n"})}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.code,{children:"OPENAI_API_KEY"})," is optional. You can provide it during ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"})," or choose a different model provider. If you want to set it in your ",(0,r.jsx)(n.code,{children:".env"})," file, you can find your OpenAI API key in your ",(0,r.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI account"}),"."]}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.code,{children:"LANGFLOW_SECRET_KEY"})," is optional. Langflow will auto-generate it if not set. For more information, see the ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key",children:"Langflow documentation"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"The following Langflow configuration values are optional but important to consider:"}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"LANGFLOW_SUPERUSER=admin\nLANGFLOW_SUPERUSER_PASSWORD=your_langflow_password\n"})}),"\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})," defaults to ",(0,r.jsx)(n.code,{children:"admin"}),". You can omit it or set it to a different username. 
",(0,r.jsx)(n.code,{children:"LANGFLOW_SUPERUSER_PASSWORD"})," is optional. If omitted, Langflow runs in ",(0,r.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login",children:"autologin mode"})," with no password required. If set, Langflow requires password authentication."]}),"\n",(0,r.jsxs)(n.p,{children:["For more information on configuring OpenRAG with environment variables, see ",(0,r.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Start ",(0,r.jsx)(n.code,{children:"docling serve"})," on the host machine.\nOpenRAG Docker installations require that ",(0,r.jsx)(n.code,{children:"docling serve"})," is running on port 5001 on the host machine.\nThis enables ",(0,r.jsx)(n.a,{href:"https://opensource.apple.com/projects/mlx/",children:"Mac MLX"})," support for document processing."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py start --port 5001\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Confirm ",(0,r.jsx)(n.code,{children:"docling serve"})," is running."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{children:"uv run python scripts/docling_ctl.py status\n"})}),"\n",(0,r.jsxs)(n.p,{children:["Make sure the response shows that ",(0,r.jsx)(n.code,{children:"docling serve"})," is running, for example:"]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"Status: running\nEndpoint: http://127.0.0.1:5001\nDocs: http://127.0.0.1:5001/docs\nPID: 27746\n"})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Deploy OpenRAG locally with Docker Compose based on your deployment type."}),"\n",(0,r.jsxs)(l.A,{groupId:"Compose file",children:[(0,r.jsx)(t.A,{value:"docker-compose.yml",label:"docker-compose.yml",default:!0,children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose build\ndocker compose up -d\n"})})}),(0,r.jsx)(t.A,{value:"docker-compose-cpu.yml",label:"docker-compose-cpu.yml",children:(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose -f docker-compose-cpu.yml up -d\n"})})})]}),"\n",(0,r.jsx)(n.p,{children:"The OpenRAG Docker Compose file starts five containers:"}),"\n",(0,r.jsxs)(n.table,{children:[(0,r.jsx)(n.thead,{children:(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.th,{children:"Container Name"}),(0,r.jsx)(n.th,{children:"Default Address"}),(0,r.jsx)(n.th,{children:"Purpose"})]})}),(0,r.jsxs)(n.tbody,{children:[(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenRAG Backend"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})}),(0,r.jsx)(n.td,{children:"FastAPI server and core functionality."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenRAG Frontend"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})}),(0,r.jsx)(n.td,{children:"React web interface for users."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"Langflow"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})}),(0,r.jsx)(n.td,{children:"AI workflow engine and flow 
management."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenSearch"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:9200",children:"http://localhost:9200"})}),(0,r.jsx)(n.td,{children:"Vector database for document storage."})]}),(0,r.jsxs)(n.tr,{children:[(0,r.jsx)(n.td,{children:"OpenSearch Dashboards"}),(0,r.jsx)(n.td,{children:(0,r.jsx)(n.a,{href:"http://localhost:5601",children:"http://localhost:5601"})}),(0,r.jsx)(n.td,{children:"Database administration interface."})]})]})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Verify installation by confirming all services are running."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose ps\n"})}),"\n",(0,r.jsx)(n.p,{children:"You can now access OpenRAG at the following endpoints:"}),"\n",(0,r.jsxs)(n.ul,{children:["\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.strong,{children:"Frontend"}),": ",(0,r.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.strong,{children:"Backend API"}),": ",(0,r.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})]}),"\n",(0,r.jsxs)(n.li,{children:[(0,r.jsx)(n.strong,{children:"Langflow"}),": ",(0,r.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:["Continue with ",(0,r.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]}),"\n",(0,r.jsxs)(n.p,{children:["To stop ",(0,r.jsx)(n.code,{children:"docling serve"})," when you're done with your OpenRAG deployment, run:"]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py stop\n"})}),"\n",(0,r.jsx)(a.Ay,{}),"\n",(0,r.jsx)(n.h2,{id:"container-management-commands",children:"Container management commands"}),"\n",(0,r.jsxs)(n.p,{children:["Manage your OpenRAG containers with the following commands.\nThese commands are also available in the TUI's ",(0,r.jsx)(n.a,{href:"/install#status",children:"Status menu"}),"."]}),"\n",(0,r.jsx)(n.h3,{id:"upgrade-containers",children:"Upgrade containers"}),"\n",(0,r.jsx)(n.p,{children:"Upgrade your containers to the latest version while preserving your data."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose pull\ndocker compose up -d --force-recreate\n"})}),"\n",(0,r.jsx)(n.h3,{id:"rebuild-containers-destructive",children:"Rebuild containers (destructive)"}),"\n",(0,r.jsxs)(n.p,{children:["Reset state by rebuilding all of your containers.\nYour OpenSearch and Langflow databases will be lost.\nDocuments stored in the ",(0,r.jsx)(n.code,{children:"./openrag-documents"})," directory will persist, since the directory is mounted as a volume in the OpenRAG backend container."]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose up --build --force-recreate --remove-orphans\n"})}),"\n",(0,r.jsx)(n.h3,{id:"remove-all-containers-and-data-destructive",children:"Remove all containers and data (destructive)"}),"\n",(0,r.jsx)(n.p,{children:"Completely remove your OpenRAG installation and delete all data.\nThis deletes all of your data, including OpenSearch data, uploaded documents, and authentication."}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-bash",children:"docker compose down --volumes --remove-orphans --rmi 
local\ndocker system prune -f\n"})})]})}function x(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(m,{...e})}):m(e)}}}]); \ No newline at end of file diff --git a/assets/js/ca2c3c0c.04fbbb9d.js b/assets/js/ca2c3c0c.7114a4c2.js similarity index 89% rename from assets/js/ca2c3c0c.04fbbb9d.js rename to assets/js/ca2c3c0c.7114a4c2.js index 50947d9f..06c147e0 100644 --- a/assets/js/ca2c3c0c.04fbbb9d.js +++ b/assets/js/ca2c3c0c.7114a4c2.js @@ -1 +1 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[6919],{5421:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>x,contentTitle:()=>g,default:()=>m,frontMatter:()=>u,metadata:()=>o,toc:()=>j});const o=JSON.parse('{"id":"core-components/ingestion","title":"Ingest knowledge","description":"Upload documents to your OpenRAG OpenSearch instance to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.","source":"@site/docs/core-components/ingestion.mdx","sourceDirName":"core-components","slug":"/ingestion","permalink":"/ingestion","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/ingestion.mdx","tags":[],"version":"current","frontMatter":{"title":"Ingest knowledge","slug":"/ingestion"},"sidebar":"tutorialSidebar","previous":{"title":"Configure knowledge","permalink":"/knowledge"},"next":{"title":"Filter knowledge","permalink":"/knowledge-filters"}}');var t=s(4848),i=s(8453),r=s(7733),l=s(1470),c=s(9365),d=s(8401);const a=[];function h(e){const n={a:"a",code:"code",li:"li",p:"p",strong:"strong",ul:"ul",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,t.jsxs)(s,{children:[(0,t.jsx)("summary",{children:"About the OpenSearch Ingestion flow"}),(0,t.jsxs)(n.p,{children:["When you upload documents locally or with OAuth connectors, the ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow runs in the background.\nBy default, this flow uses Docling Serve to import and process documents."]}),(0,t.jsxs)(n.p,{children:["Like all ",(0,t.jsx)(n.a,{href:"/agents",children:"OpenRAG flows"}),", you can ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it if you want to change the knowledge ingestion settings."]}),(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow is comprised of several components that work together to process and store documents in your knowledge base:"]}),(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-docling#docling-serve",children:[(0,t.jsx)(n.strong,{children:"Docling Serve"})," component"]}),": Ingests files and processes them by connecting to OpenRAG's local Docling Serve service. 
The output is ",(0,t.jsx)(n.code,{children:"DoclingDocument"})," data that contains the extracted text and metadata from the documents."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-docling#export-doclingdocument",children:[(0,t.jsx)(n.strong,{children:"Export DoclingDocument"})," component"]}),": Exports processed ",(0,t.jsx)(n.code,{children:"DoclingDocument"})," data to Markdown format with image placeholders. This conversion standardizes the document data in preparation for further processing."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#dataframe-operations",children:[(0,t.jsx)(n.strong,{children:"DataFrame Operations"})," component"]}),": Three of these components run sequentially to add metadata to the document data: ",(0,t.jsx)(n.code,{children:"filename"}),", ",(0,t.jsx)(n.code,{children:"file_size"}),", and ",(0,t.jsx)(n.code,{children:"mimetype"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#split-text",children:[(0,t.jsx)(n.strong,{children:"Split Text"})," component"]}),": Splits the processed text into chunks, based on the configured ",(0,t.jsx)(n.a,{href:"/knowledge#knowledge-ingestion-settings",children:"chunk size and overlap settings"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Secret Input"})," component: If needed, four of these components securely fetch the ",(0,t.jsx)(n.a,{href:"/knowledge#auth",children:"OAuth authentication"})," configuration variables: ",(0,t.jsx)(n.code,{children:"CONNECTOR_TYPE"}),", ",(0,t.jsx)(n.code,{children:"OWNER"}),", ",(0,t.jsx)(n.code,{children:"OWNER_EMAIL"}),", and ",(0,t.jsx)(n.code,{children:"OWNER_NAME"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Create Data"})," component: Combines the authentication credentials from the ",(0,t.jsx)(n.strong,{children:"Secret Input"})," components into a structured data object that is associated with the document embeddings."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,t.jsx)(n.strong,{children:"Embedding Model"})," component"]}),": Generates vector embeddings using your selected ",(0,t.jsx)(n.a,{href:"/knowledge#set-the-embedding-model-and-dimensions",children:"embedding model"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,t.jsx)(n.strong,{children:"OpenSearch"})," component"]}),": Stores the processed documents and their embeddings in a ",(0,t.jsx)(n.code,{children:"documents"})," index of your OpenRAG ",(0,t.jsx)(n.a,{href:"/knowledge",children:"OpenSearch knowledge base"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default address for the OpenSearch instance is ",(0,t.jsx)(n.code,{children:"https://opensearch:9200"}),". To change this address, edit the ",(0,t.jsx)(n.code,{children:"OPENSEARCH_PORT"})," ",(0,t.jsx)(n.a,{href:"/reference/configuration#opensearch-settings",children:"environment variable"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default authentication method is JSON Web Token (JWT) authentication. 
If you ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"edit the flow"}),", you can select ",(0,t.jsx)(n.code,{children:"basic"})," auth mode, which uses the ",(0,t.jsx)(n.code,{children:"OPENSEARCH_USERNAME"})," and ",(0,t.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," ",(0,t.jsx)(n.a,{href:"/reference/configuration#opensearch-settings",children:"environment variables"})," for authentication instead of JWT."]}),"\n"]}),"\n"]})]})}function p(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(h,{...e})}):h(e)}const u={title:"Ingest knowledge",slug:"/ingestion"},g=void 0,x={},j=[{value:"Ingest local files and folders",id:"ingest-local-files-and-folders",level:2},...a,{value:"Ingest local files temporarily",id:"ingest-local-files-temporarily",level:2},...d.RM,{value:"Ingest files with OAuth connectors",id:"oauth-ingestion",level:2},{value:"Enable OAuth connectors",id:"enable-oauth-connectors",level:3},{value:"Authenticate and ingest files from cloud storage",id:"authenticate-and-ingest-files-from-cloud-storage",level:3},...a,{value:"Ingest knowledge from URLs",id:"url-flow",level:2},{value:"Monitor ingestion",id:"monitor-ingestion",level:2},{value:"Ingestion performance expectations",id:"ingestion-performance-expectations",level:3},{value:"Troubleshoot ingestion",id:"troubleshoot-ingestion",level:2},{value:"See also",id:"see-also",level:2}];function f(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,t.jsxs)(t.Fragment,{children:[(0,t.jsxs)(n.p,{children:["Upload documents to your ",(0,t.jsx)(n.a,{href:"/knowledge",children:"OpenRAG OpenSearch instance"})," to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.\nDocuments are processed through OpenRAG's knowledge ingestion flows with Docling."]}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG can ingest knowledge from direct file uploads, URLs, and OAuth authenticated connectors."}),"\n",(0,t.jsxs)(n.p,{children:["Knowledge ingestion is powered by OpenRAG's built-in knowledge ingestion flows that use Docling to process documents before storing the documents in your OpenSearch database.\nDuring ingestion, documents are broken into smaller chunks of content that are then embedded using your selected ",(0,t.jsx)(n.a,{href:"/knowledge#set-the-embedding-model-and-dimensions",children:"embedding model"}),".\nThen, the chunks, embeddings, and associated metadata (which connects chunks of the same document) are stored in your OpenSearch database."]}),"\n",(0,t.jsxs)(n.p,{children:["To modify chunking behavior and other ingestion settings, see ",(0,t.jsx)(n.a,{href:"/knowledge#knowledge-ingestion-settings",children:"Knowledge ingestion settings"})," and ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"ingest-local-files-and-folders",children:"Ingest local files and folders"}),"\n",(0,t.jsx)(n.p,{children:"You can upload files and folders from your local machine to your knowledge base:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"})," 
to view your OpenSearch knowledge base."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"})," to add your own documents to your OpenRAG knowledge base."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To upload one file, click ",(0,t.jsx)(r.A,{name:"File","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"File"}),". To upload all documents in a folder, click ",(0,t.jsx)(r.A,{name:"Folder","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Folder"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default path is the ",(0,t.jsx)(n.code,{children:"./documents"})," subdirectory in your OpenRAG installation directory.\nTo change this path, see ",(0,t.jsx)(n.a,{href:"/knowledge#set-the-local-documents-path",children:"Set the local documents path"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["The selected files are processed in the background through the ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow."]}),"\n",(0,t.jsx)(p,{}),"\n",(0,t.jsxs)(n.p,{children:["You can ",(0,t.jsx)(n.a,{href:"#monitor-ingestion",children:"monitor ingestion"})," to see the progress of the uploads and check for failed uploads."]}),"\n",(0,t.jsx)(n.h2,{id:"ingest-local-files-temporarily",children:"Ingest local files temporarily"}),"\n",(0,t.jsx)(d.Ay,{}),"\n",(0,t.jsx)(n.h2,{id:"oauth-ingestion",children:"Ingest files with OAuth connectors"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG can use OAuth authenticated connectors to ingest documents from the following external services:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"AWS S3"}),"\n",(0,t.jsx)(n.li,{children:"Google Drive"}),"\n",(0,t.jsx)(n.li,{children:"Microsoft OneDrive"}),"\n",(0,t.jsx)(n.li,{children:"Microsoft Sharepoint"}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"These connectors enable seamless ingestion of files from cloud storage to your OpenRAG knowledge base."}),"\n",(0,t.jsx)(n.p,{children:"Individual users can connect their personal cloud storage accounts to OpenRAG. Each user must separately authorize OpenRAG to access their own cloud storage. When a user connects a cloud storage service, they are redirected to authenticate with that service provider and grant OpenRAG permission to sync documents from their personal cloud storage."}),"\n",(0,t.jsx)(n.h3,{id:"enable-oauth-connectors",children:"Enable OAuth connectors"}),"\n",(0,t.jsx)(n.p,{children:"Before users can connect their own cloud storage accounts, you must configure the provider's OAuth credentials in OpenRAG. 
Typically, this requires that you register OpenRAG as an OAuth application in your cloud provider, and then obtain the app's OAuth credentials, such as a client ID and secret key.\nTo enable multiple connectors, you must register an app and generate credentials for each provider."}),"\n",(0,t.jsxs)(l.A,{children:[(0,t.jsxs)(c.A,{value:"TUI",label:"TUI Advanced Setup",default:!0,children:[(0,t.jsxs)(n.p,{children:["If you use the TUI to manage your OpenRAG containers, provide OAuth credentials in the ",(0,t.jsx)(n.strong,{children:"Advanced Setup"}),"."]}),(0,t.jsxs)(n.p,{children:["You can do this during ",(0,t.jsx)(n.a,{href:"/install#setup",children:"installation"}),", or you can add the credentials afterwards:"]}),(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["If OpenRAG is running, stop it: Go to ",(0,t.jsx)(n.a,{href:"/install#tui-container-management",children:(0,t.jsx)(n.strong,{children:"Status"})}),", and then click ",(0,t.jsx)(n.strong,{children:"Stop Services"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Advanced Setup"}),", and then add the OAuth credentials for the cloud storage providers that you want to use:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on ",(0,t.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the ",(0,t.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". For more information, see the ",(0,t.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". 
For more information, see the ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"The OpenRAG TUI presents redirect URIs for your OAuth app that you must register with your OAuth provider.\nThese are the URLs your OAuth provider will redirect back to after users authenticate and grant access to their cloud storage."}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG regenerates the ",(0,t.jsx)(n.a,{href:"/reference/configuration",children:(0,t.jsx)(n.code,{children:".env"})})," file with the given credentials."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Start Container Services"}),"."]}),"\n"]}),"\n"]})]}),(0,t.jsxs)(c.A,{value:"env",label:"Docker Compose .env file",children:[(0,t.jsxs)(n.p,{children:["If you ",(0,t.jsx)(n.a,{href:"/docker",children:"install OpenRAG with self-managed containers"}),", set OAuth credentials in the ",(0,t.jsx)(n.code,{children:".env"})," file for Docker Compose."]}),(0,t.jsxs)(n.p,{children:["You can do this during ",(0,t.jsx)(n.a,{href:"/docker#install-openrag-with-docker-compose",children:"initial set up"}),", or you can add the credentials afterwards:"]}),(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Stop your OpenRAG deployment."}),"\n",(0,t.jsxs)(l.A,{children:[(0,t.jsx)(c.A,{value:"podman",label:"Podman",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"podman stop --all\n"})})}),(0,t.jsx)(c.A,{value:"docker",label:"Docker",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"docker stop $(docker ps -q)\n"})})})]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Edit the ",(0,t.jsx)(n.code,{children:".env"})," file for Docker Compose to add the OAuth credentials for the cloud storage providers that you want to use:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on ",(0,t.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-env",children:"AWS_ACCESS_KEY_ID=\nAWS_SECRET_ACCESS_KEY=\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the ",(0,t.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". 
For more information, see the ",(0,t.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-env",children:"GOOGLE_OAUTH_CLIENT_ID=\nGOOGLE_OAUTH_CLIENT_SECRET=\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". For more information, see the ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-env",children:"MICROSOFT_GRAPH_OAUTH_CLIENT_ID=\nMICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=\n"})}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Save the ",(0,t.jsx)(n.code,{children:".env"})," file."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Restart your OpenRAG deployment:"}),"\n",(0,t.jsxs)(l.A,{children:[(0,t.jsx)(c.A,{value:"podman",label:"Podman",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"podman-compose up -d\n"})})}),(0,t.jsx)(c.A,{value:"docker",label:"Docker",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"docker-compose up -d\n"})})})]}),"\n"]}),"\n"]})]})]}),"\n",(0,t.jsx)(n.h3,{id:"authenticate-and-ingest-files-from-cloud-storage",children:"Authenticate and ingest files from cloud storage"}),"\n",(0,t.jsxs)(n.p,{children:["After you start OpenRAG with OAuth connectors enabled, each user is prompted to authenticate with the OAuth provider upon accessing your OpenRAG instance.\nIndividual authentication is required to access a user's cloud storage from your OpenRAG instance.\nFor example, if a user navigates to the default OpenRAG URL at ",(0,t.jsx)(n.code,{children:"http://localhost:3000"}),", they are redirected to the OAuth provider's sign-in page.\nAfter authenticating and granting the required permissions for OpenRAG, the user is redirected back to OpenRAG."]}),"\n",(0,t.jsx)(n.p,{children:"To ingest knowledge with an OAuth connector, do the following:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"})," to view your OpenSearch knowledge base."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then select a storage provider."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["On the ",(0,t.jsx)(n.strong,{children:"Add Cloud Knowledge"})," page, click ",(0,t.jsx)(n.strong,{children:"Add Files"}),", and then select the files and folders to ingest from the connected storage."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Ingest Files"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["The selected files are processed in the background through the ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," 
flow."]}),"\n",(0,t.jsx)(p,{}),"\n",(0,t.jsxs)(n.p,{children:["You can ",(0,t.jsx)(n.a,{href:"#monitor-ingestion",children:"monitor ingestion"})," to see the progress of the uploads and check for failed uploads."]}),"\n",(0,t.jsx)(n.h2,{id:"url-flow",children:"Ingest knowledge from URLs"}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenSearch URL Ingestion"})," flow is used to ingest web content from URLs.\nThis flow isn't directly accessible from the OpenRAG user interface.\nInstead, this flow is called by the ",(0,t.jsxs)(n.a,{href:"/chat#flow",children:[(0,t.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow"]})," as a Model Context Protocol (MCP) tool.\nThe agent can call this component to fetch web content from a given URL, and then ingest that content into your OpenSearch knowledge base."]}),"\n",(0,t.jsxs)(n.p,{children:["Like all OpenRAG flows, you can ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it."]}),"\n",(0,t.jsxs)(n.p,{children:["For more information about MCP in Langflow, see the Langflow documentation on ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-client",children:"MCP clients"})," and ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-tutorial",children:"MCP servers"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"monitor-ingestion",children:"Monitor ingestion"}),"\n",(0,t.jsx)(n.p,{children:"Document ingestion tasks run in the background."}),"\n",(0,t.jsxs)(n.p,{children:["In the OpenRAG user interface, a badge is shown on ",(0,t.jsx)(r.A,{name:"Bell","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Tasks"})," when OpenRAG tasks are active.\nClick ",(0,t.jsx)(r.A,{name:"Bell","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Tasks"})," to inspect and cancel tasks:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Active Tasks"}),": All tasks that are ",(0,t.jsx)(n.strong,{children:"Pending"}),", ",(0,t.jsx)(n.strong,{children:"Running"}),", or ",(0,t.jsx)(n.strong,{children:"Processing"}),".\nFor each active task, depending on its state, you can find the task ID, start time, duration, number of files processed, and the total files enqueued for processing."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Pending"}),": The task is queued and waiting to start."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Running"}),": The task is actively processing files."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Processing"}),": The task is performing ingestion operations."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Failed"}),": Something went wrong during ingestion, or the task was manually canceled.\nFor troubleshooting advice, see ",(0,t.jsx)(n.a,{href:"#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["To stop an active task, click ",(0,t.jsx)(r.A,{name:"X","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Cancel"}),". 
Canceling a task stops processing immediately and marks the task as ",(0,t.jsx)(n.strong,{children:"Failed"}),"."]}),"\n",(0,t.jsx)(n.h3,{id:"ingestion-performance-expectations",children:"Ingestion performance expectations"}),"\n",(0,t.jsx)(n.p,{children:"The following performance test was conducted with Docling Serve."}),"\n",(0,t.jsx)(n.p,{children:"On a local VM with 7 vCPUs and 8 GiB RAM, OpenRAG ingested approximately 5.03 GB across 1,083 files in about 42 minutes.\nThis equates to approximately 2.4 documents per second."}),"\n",(0,t.jsx)(n.p,{children:"You can generally expect equal or better performance on developer laptops, and significantly faster performance on servers.\nThroughput scales with CPU cores, memory, storage speed, and configuration choices, such as the embedding model, chunk size, overlap, and concurrency."}),"\n",(0,t.jsx)(n.p,{children:"This test returned 12 error, approximately 1.1 percent of the total files ingested.\nAll errors were file-specific, and they didn't stop the pipeline."}),"\n",(0,t.jsxs)(s,{children:[(0,t.jsx)("summary",{children:"Ingestion performance test details"}),(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Ingestion dataset:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Total files: 1,083 items mounted"}),"\n",(0,t.jsx)(n.li,{children:"Total size on disk: 5,026,474,862 bytes (approximately 5.03 GB)"}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Hardware specifications:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Machine: Apple M4 Pro"}),"\n",(0,t.jsxs)(n.li,{children:["Podman VM:","\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Name: podman-machine-default"}),"\n",(0,t.jsx)(n.li,{children:"Type: applehv"}),"\n",(0,t.jsx)(n.li,{children:"vCPUs: 7"}),"\n",(0,t.jsx)(n.li,{children:"Memory: 8 GiB"}),"\n",(0,t.jsx)(n.li,{children:"Disk size: 100 GiB"}),"\n"]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Test results:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-text",children:"2025-09-24T22:40:45.542190Z /app/src/main.py:231 Ingesting default documents when ready disable_langflow_ingest=False\n2025-09-24T22:40:45.546385Z /app/src/main.py:270 Using Langflow ingestion pipeline for default documents file_count=1082\n...\n2025-09-24T23:19:44.866365Z /app/src/main.py:351 Langflow ingestion completed success_count=1070 error_count=12 total_files=1082\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Elapsed time: Approximately 42 minutes 15 seconds (2,535 seconds)"}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Throughput: Approximately 2.4 documents per second"}),"\n"]}),"\n"]})]}),"\n",(0,t.jsx)(n.h2,{id:"troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"\n",(0,t.jsx)(n.p,{children:"If an ingestion task fails, do the following:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Make sure you are uploading supported file types."}),"\n",(0,t.jsx)(n.li,{children:"Split excessively large files into smaller files before uploading."}),"\n",(0,t.jsx)(n.li,{children:"Remove unusual embedded content, such as videos or animations, before uploading. 
Although Docling can replace some non-text content with placeholders during ingestion, some embedded content might cause errors."}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["If the OpenRAG ",(0,t.jsx)(n.strong,{children:"Chat"})," doesn't seem to use your documents correctly, ",(0,t.jsx)(n.a,{href:"#browse-knowledge",children:"browse your knowledge base"})," to confirm that the documents are uploaded in full, and the chunks are correct."]}),"\n",(0,t.jsxs)(n.p,{children:["If the documents are present and well-formed, check your ",(0,t.jsx)(n.a,{href:"/knowledge-filters",children:"knowledge filters"}),".\nIf a global filter is applied, make sure the expected documents are included in the global filter.\nIf the global filter excludes any documents, the agent cannot access those documents unless you apply a chat-level filter or change the global filter."]}),"\n",(0,t.jsx)(n.p,{children:"If text is missing or incorrectly processed, you need to reupload the documents after modifying the ingestion parameters or the documents themselves.\nFor example:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Break combined documents into separate files for better metadata context."}),"\n",(0,t.jsxs)(n.li,{children:["Make sure scanned documents are legible enough for extraction, and enable the ",(0,t.jsx)(n.strong,{children:"OCR"})," option. Poorly scanned documents might require additional preparation or rescanning before ingestion."]}),"\n",(0,t.jsxs)(n.li,{children:["Adjust the ",(0,t.jsx)(n.strong,{children:"Chunk Size"})," and ",(0,t.jsx)(n.strong,{children:"Chunk Overlap"})," settings to better suit your documents. Larger chunks provide more context but can include irrelevant information, while smaller chunks yield more precise semantic search but can lack context."]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["For more information about modifying ingestion parameters and flows, see ",(0,t.jsx)(n.a,{href:"/knowledge#knowledge-ingestion-settings",children:"Knowledge ingestion settings"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"see-also",children:"See also"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/knowledge",children:"Configure knowledge"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/knowledge-filters",children:"Filter knowledge"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/chat",children:"Chat with knowledge"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"})}),"\n"]})]})}function m(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(f,{...e})}):f(e)}},8401:(e,n,s)=>{s.d(n,{Ay:()=>c,RM:()=>r});var o=s(4848),t=s(8453),i=s(7733);const r=[];function l(e){const n={p:"p",strong:"strong",...(0,t.R)(),...e.components};return(0,o.jsxs)(n.p,{children:["When using the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", click ",(0,o.jsx)(i.A,{name:"Plus","aria-hidden":"true"})," in the chat input field to upload a file to the current chat session.\nFiles added this way are processed and made available to the agent for the current conversation only.\nThese files aren't stored in the knowledge base permanently."]})}function c(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(l,{...e})}):l(e)}}}]); \ No newline at end of file +"use 
strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[6919],{5421:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>x,contentTitle:()=>g,default:()=>m,frontMatter:()=>u,metadata:()=>o,toc:()=>j});const o=JSON.parse('{"id":"core-components/ingestion","title":"Ingest knowledge","description":"Upload documents to your OpenRAG OpenSearch instance to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.","source":"@site/docs/core-components/ingestion.mdx","sourceDirName":"core-components","slug":"/ingestion","permalink":"/ingestion","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/ingestion.mdx","tags":[],"version":"current","frontMatter":{"title":"Ingest knowledge","slug":"/ingestion"},"sidebar":"tutorialSidebar","previous":{"title":"Configure knowledge","permalink":"/knowledge"},"next":{"title":"Filter knowledge","permalink":"/knowledge-filters"}}');var t=s(4848),i=s(8453),r=s(7733),l=s(1470),c=s(9365),d=s(8401);const a=[];function h(e){const n={a:"a",code:"code",li:"li",p:"p",strong:"strong",ul:"ul",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,t.jsxs)(s,{children:[(0,t.jsx)("summary",{children:"About the OpenSearch Ingestion flow"}),(0,t.jsxs)(n.p,{children:["When you upload documents locally or with OAuth connectors, the ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow runs in the background.\nBy default, this flow uses Docling Serve to import and process documents."]}),(0,t.jsxs)(n.p,{children:["Like all ",(0,t.jsx)(n.a,{href:"/agents",children:"OpenRAG flows"}),", you can ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it if you want to change the knowledge ingestion settings."]}),(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow is comprised of several components that work together to process and store documents in your knowledge base:"]}),(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-docling#docling-serve",children:[(0,t.jsx)(n.strong,{children:"Docling Serve"})," component"]}),": Ingests files and processes them by connecting to OpenRAG's local Docling Serve service. The output is ",(0,t.jsx)(n.code,{children:"DoclingDocument"})," data that contains the extracted text and metadata from the documents."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-docling#export-doclingdocument",children:[(0,t.jsx)(n.strong,{children:"Export DoclingDocument"})," component"]}),": Exports processed ",(0,t.jsx)(n.code,{children:"DoclingDocument"})," data to Markdown format with image placeholders. 
This conversion standardizes the document data in preparation for further processing."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#dataframe-operations",children:[(0,t.jsx)(n.strong,{children:"DataFrame Operations"})," component"]}),": Three of these components run sequentially to add metadata to the document data: ",(0,t.jsx)(n.code,{children:"filename"}),", ",(0,t.jsx)(n.code,{children:"file_size"}),", and ",(0,t.jsx)(n.code,{children:"mimetype"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-processing#split-text",children:[(0,t.jsx)(n.strong,{children:"Split Text"})," component"]}),": Splits the processed text into chunks, based on the configured ",(0,t.jsx)(n.a,{href:"/knowledge#knowledge-ingestion-settings",children:"chunk size and overlap settings"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Secret Input"})," component: If needed, four of these components securely fetch the ",(0,t.jsx)(n.a,{href:"/knowledge#auth",children:"OAuth authentication"})," configuration variables: ",(0,t.jsx)(n.code,{children:"CONNECTOR_TYPE"}),", ",(0,t.jsx)(n.code,{children:"OWNER"}),", ",(0,t.jsx)(n.code,{children:"OWNER_EMAIL"}),", and ",(0,t.jsx)(n.code,{children:"OWNER_NAME"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Create Data"})," component: Combines the authentication credentials from the ",(0,t.jsx)(n.strong,{children:"Secret Input"})," components into a structured data object that is associated with the document embeddings."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,t.jsx)(n.strong,{children:"Embedding Model"})," component"]}),": Generates vector embeddings using your selected ",(0,t.jsx)(n.a,{href:"/knowledge#set-the-embedding-model-and-dimensions",children:"embedding model"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,t.jsx)(n.strong,{children:"OpenSearch"})," component"]}),": Stores the processed documents and their embeddings in a ",(0,t.jsx)(n.code,{children:"documents"})," index of your OpenRAG ",(0,t.jsx)(n.a,{href:"/knowledge",children:"OpenSearch knowledge base"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default address for the OpenSearch instance is ",(0,t.jsx)(n.code,{children:"https://opensearch:9200"}),". To change this address, edit the ",(0,t.jsx)(n.code,{children:"OPENSEARCH_PORT"})," ",(0,t.jsx)(n.a,{href:"/reference/configuration#opensearch-settings",children:"environment variable"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default authentication method is JSON Web Token (JWT) authentication. 
If you ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"edit the flow"}),", you can select ",(0,t.jsx)(n.code,{children:"basic"})," auth mode, which uses the ",(0,t.jsx)(n.code,{children:"OPENSEARCH_USERNAME"})," and ",(0,t.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," ",(0,t.jsx)(n.a,{href:"/reference/configuration#opensearch-settings",children:"environment variables"})," for authentication instead of JWT."]}),"\n"]}),"\n"]})]})}function p(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(h,{...e})}):h(e)}const u={title:"Ingest knowledge",slug:"/ingestion"},g=void 0,x={},j=[{value:"Ingest local files and folders",id:"ingest-local-files-and-folders",level:2},...a,{value:"Ingest local files temporarily",id:"ingest-local-files-temporarily",level:2},...d.RM,{value:"Ingest files with OAuth connectors",id:"oauth-ingestion",level:2},{value:"Enable OAuth connectors",id:"enable-oauth-connectors",level:3},{value:"Authenticate and ingest files from cloud storage",id:"authenticate-and-ingest-files-from-cloud-storage",level:3},...a,{value:"Ingest knowledge from URLs",id:"url-flow",level:2},{value:"Monitor ingestion",id:"monitor-ingestion",level:2},{value:"Ingestion performance expectations",id:"ingestion-performance-expectations",level:3},{value:"Troubleshoot ingestion",id:"troubleshoot-ingestion",level:2},{value:"See also",id:"see-also",level:2}];function f(e){const n={a:"a",code:"code",h2:"h2",h3:"h3",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,t.jsxs)(t.Fragment,{children:[(0,t.jsxs)(n.p,{children:["Upload documents to your ",(0,t.jsx)(n.a,{href:"/knowledge",children:"OpenRAG OpenSearch instance"})," to populate your knowledge base with unique content, such as your own company documents, research papers, or websites.\nDocuments are processed through OpenRAG's knowledge ingestion flows with Docling."]}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG can ingest knowledge from direct file uploads, URLs, and OAuth authenticated connectors."}),"\n",(0,t.jsxs)(n.p,{children:["Knowledge ingestion is powered by OpenRAG's built-in knowledge ingestion flows that use Docling to process documents before storing the documents in your OpenSearch database.\nDuring ingestion, documents are broken into smaller chunks of content that are then embedded using your selected ",(0,t.jsx)(n.a,{href:"/knowledge#set-the-embedding-model-and-dimensions",children:"embedding model"}),".\nThen, the chunks, embeddings, and associated metadata (which connects chunks of the same document) are stored in your OpenSearch database."]}),"\n",(0,t.jsxs)(n.p,{children:["To modify chunking behavior and other ingestion settings, see ",(0,t.jsx)(n.a,{href:"/knowledge#knowledge-ingestion-settings",children:"Knowledge ingestion settings"})," and ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"ingest-local-files-and-folders",children:"Ingest local files and folders"}),"\n",(0,t.jsx)(n.p,{children:"You can upload files and folders from your local machine to your knowledge base:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"})," 
to view your OpenSearch knowledge base."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"})," to add your own documents to your OpenRAG knowledge base."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To upload one file, click ",(0,t.jsx)(r.A,{name:"File","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"File"}),". To upload all documents in a folder, click ",(0,t.jsx)(r.A,{name:"Folder","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Folder"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["The default path is the ",(0,t.jsx)(n.code,{children:"./documents"})," subdirectory in your OpenRAG installation directory.\nTo change this path, see ",(0,t.jsx)(n.a,{href:"/knowledge#set-the-local-documents-path",children:"Set the local documents path"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["The selected files are processed in the background through the ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," flow."]}),"\n",(0,t.jsx)(p,{}),"\n",(0,t.jsxs)(n.p,{children:["You can ",(0,t.jsx)(n.a,{href:"#monitor-ingestion",children:"monitor ingestion"})," to see the progress of the uploads and check for failed uploads."]}),"\n",(0,t.jsx)(n.h2,{id:"ingest-local-files-temporarily",children:"Ingest local files temporarily"}),"\n",(0,t.jsx)(d.Ay,{}),"\n",(0,t.jsx)(n.h2,{id:"oauth-ingestion",children:"Ingest files with OAuth connectors"}),"\n",(0,t.jsx)(n.p,{children:"OpenRAG can use OAuth authenticated connectors to ingest documents from the following external services:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"AWS S3"}),"\n",(0,t.jsx)(n.li,{children:"Google Drive"}),"\n",(0,t.jsx)(n.li,{children:"Microsoft OneDrive"}),"\n",(0,t.jsx)(n.li,{children:"Microsoft Sharepoint"}),"\n"]}),"\n",(0,t.jsx)(n.p,{children:"These connectors enable seamless ingestion of files from cloud storage to your OpenRAG knowledge base."}),"\n",(0,t.jsx)(n.p,{children:"Individual users can connect their personal cloud storage accounts to OpenRAG. Each user must separately authorize OpenRAG to access their own cloud storage. When a user connects a cloud storage service, they are redirected to authenticate with that service provider and grant OpenRAG permission to sync documents from their personal cloud storage."}),"\n",(0,t.jsx)(n.h3,{id:"enable-oauth-connectors",children:"Enable OAuth connectors"}),"\n",(0,t.jsx)(n.p,{children:"Before users can connect their own cloud storage accounts, you must configure the provider's OAuth credentials in OpenRAG. 
Typically, this requires that you register OpenRAG as an OAuth application in your cloud provider, and then obtain the app's OAuth credentials, such as a client ID and secret key.\nTo enable multiple connectors, you must register an app and generate credentials for each provider."}),"\n",(0,t.jsxs)(l.A,{children:[(0,t.jsxs)(c.A,{value:"TUI",label:"TUI Advanced Setup",default:!0,children:[(0,t.jsxs)(n.p,{children:["If you use the TUI to manage your OpenRAG containers, provide OAuth credentials in the ",(0,t.jsx)(n.strong,{children:"Advanced Setup"}),"."]}),(0,t.jsxs)(n.p,{children:["You can do this during ",(0,t.jsx)(n.a,{href:"/install#setup",children:"installation"}),", or you can add the credentials afterwards:"]}),(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["If OpenRAG is running, stop it: Go to ",(0,t.jsx)(n.a,{href:"/install#tui-container-management",children:(0,t.jsx)(n.strong,{children:"Status"})}),", and then click ",(0,t.jsx)(n.strong,{children:"Stop Services"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Advanced Setup"}),", and then add the OAuth credentials for the cloud storage providers that you want to use:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on ",(0,t.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the ",(0,t.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". For more information, see the ",(0,t.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". 
For more information, see the ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"The OpenRAG TUI presents redirect URIs for your OAuth app that you must register with your OAuth provider.\nThese are the URLs your OAuth provider will redirect back to after users authenticate and grant access to their cloud storage."}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["OpenRAG regenerates the ",(0,t.jsx)(n.a,{href:"/reference/configuration",children:(0,t.jsx)(n.code,{children:".env"})})," file with the given credentials."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Start Container Services"}),"."]}),"\n"]}),"\n"]})]}),(0,t.jsxs)(c.A,{value:"env",label:"Docker Compose .env file",children:[(0,t.jsxs)(n.p,{children:["If you ",(0,t.jsx)(n.a,{href:"/docker",children:"install OpenRAG with self-managed containers"}),", set OAuth credentials in the ",(0,t.jsx)(n.code,{children:".env"})," file for Docker Compose."]}),(0,t.jsxs)(n.p,{children:["You can do this during ",(0,t.jsx)(n.a,{href:"/docker#install-openrag-with-docker-compose",children:"initial set up"}),", or you can add the credentials afterwards:"]}),(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Stop your OpenRAG deployment."}),"\n",(0,t.jsxs)(l.A,{children:[(0,t.jsx)(c.A,{value:"podman",label:"Podman",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"podman stop --all\n"})})}),(0,t.jsx)(c.A,{value:"docker",label:"Docker",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"docker stop $(docker ps -q)\n"})})})]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Edit the ",(0,t.jsx)(n.code,{children:".env"})," file for Docker Compose to add the OAuth credentials for the cloud storage providers that you want to use:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on ",(0,t.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-env",children:"AWS_ACCESS_KEY_ID=\nAWS_SECRET_ACCESS_KEY=\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the ",(0,t.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". 
For more information, see the ",(0,t.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-env",children:"GOOGLE_OAUTH_CLIENT_ID=\nGOOGLE_OAUTH_CLIENT_SECRET=\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". For more information, see the ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-env",children:"MICROSOFT_GRAPH_OAUTH_CLIENT_ID=\nMICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=\n"})}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Save the ",(0,t.jsx)(n.code,{children:".env"})," file."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Restart your OpenRAG deployment:"}),"\n",(0,t.jsxs)(l.A,{children:[(0,t.jsx)(c.A,{value:"podman",label:"Podman",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"podman-compose up -d\n"})})}),(0,t.jsx)(c.A,{value:"docker",label:"Docker",children:(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"docker-compose up -d\n"})})})]}),"\n"]}),"\n"]})]})]}),"\n",(0,t.jsx)(n.h3,{id:"authenticate-and-ingest-files-from-cloud-storage",children:"Authenticate and ingest files from cloud storage"}),"\n",(0,t.jsxs)(n.p,{children:["After you start OpenRAG with OAuth connectors enabled, each user is prompted to authenticate with the OAuth provider upon accessing your OpenRAG instance.\nIndividual authentication is required to access a user's cloud storage from your OpenRAG instance.\nFor example, if a user navigates to the default OpenRAG URL at ",(0,t.jsx)(n.code,{children:"http://localhost:3000"}),", they are redirected to the OAuth provider's sign-in page.\nAfter authenticating and granting the required permissions for OpenRAG, the user is redirected back to OpenRAG."]}),"\n",(0,t.jsx)(n.p,{children:"To ingest knowledge with an OAuth connector, do the following:"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(r.A,{name:"Library","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Knowledge"})," to view your OpenSearch knowledge base."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Add Knowledge"}),", and then select a storage provider."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["On the ",(0,t.jsx)(n.strong,{children:"Add Cloud Knowledge"})," page, click ",(0,t.jsx)(n.strong,{children:"Add Files"}),", and then select the files and folders to ingest from the connected storage."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Click ",(0,t.jsx)(n.strong,{children:"Ingest Files"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["The selected files are processed in the background through the ",(0,t.jsx)(n.strong,{children:"OpenSearch Ingestion"})," 
flow."]}),"\n",(0,t.jsx)(p,{}),"\n",(0,t.jsxs)(n.p,{children:["You can ",(0,t.jsx)(n.a,{href:"#monitor-ingestion",children:"monitor ingestion"})," to see the progress of the uploads and check for failed uploads."]}),"\n",(0,t.jsx)(n.h2,{id:"url-flow",children:"Ingest knowledge from URLs"}),"\n",(0,t.jsxs)(n.p,{children:["The ",(0,t.jsx)(n.strong,{children:"OpenSearch URL Ingestion"})," flow is used to ingest web content from URLs.\nThis flow isn't directly accessible from the OpenRAG user interface.\nInstead, this flow is called by the ",(0,t.jsxs)(n.a,{href:"/chat#flow",children:[(0,t.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow"]})," as a Model Context Protocol (MCP) tool.\nThe agent can call this component to fetch web content from a given URL, and then ingest that content into your OpenSearch knowledge base."]}),"\n",(0,t.jsxs)(n.p,{children:["Like all OpenRAG flows, you can ",(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it."]}),"\n",(0,t.jsxs)(n.p,{children:["For more information about MCP in Langflow, see the Langflow documentation on ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-client",children:"MCP clients"})," and ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/mcp-tutorial",children:"MCP servers"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"monitor-ingestion",children:"Monitor ingestion"}),"\n",(0,t.jsx)(n.p,{children:"Document ingestion tasks run in the background."}),"\n",(0,t.jsxs)(n.p,{children:["In the OpenRAG user interface, a badge is shown on ",(0,t.jsx)(r.A,{name:"Bell","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Tasks"})," when OpenRAG tasks are active.\nClick ",(0,t.jsx)(r.A,{name:"Bell","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Tasks"})," to inspect and cancel tasks:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Active Tasks"}),": All tasks that are ",(0,t.jsx)(n.strong,{children:"Pending"}),", ",(0,t.jsx)(n.strong,{children:"Running"}),", or ",(0,t.jsx)(n.strong,{children:"Processing"}),".\nFor each active task, depending on its state, you can find the task ID, start time, duration, number of files processed, and the total files enqueued for processing."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Pending"}),": The task is queued and waiting to start."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Running"}),": The task is actively processing files."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Processing"}),": The task is performing ingestion operations."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Failed"}),": Something went wrong during ingestion, or the task was manually canceled.\nFor troubleshooting advice, see ",(0,t.jsx)(n.a,{href:"#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["To stop an active task, click ",(0,t.jsx)(r.A,{name:"X","aria-hidden":"true"})," ",(0,t.jsx)(n.strong,{children:"Cancel"}),". 
Canceling a task stops processing immediately and marks the task as ",(0,t.jsx)(n.strong,{children:"Failed"}),"."]}),"\n",(0,t.jsx)(n.h3,{id:"ingestion-performance-expectations",children:"Ingestion performance expectations"}),"\n",(0,t.jsx)(n.p,{children:"The following performance test was conducted with Docling Serve."}),"\n",(0,t.jsx)(n.p,{children:"On a local VM with 7 vCPUs and 8 GiB RAM, OpenRAG ingested approximately 5.03 GB across 1,083 files in about 42 minutes.\nThis equates to approximately 2.4 documents per second."}),"\n",(0,t.jsx)(n.p,{children:"You can generally expect equal or better performance on developer laptops, and significantly faster performance on servers.\nThroughput scales with CPU cores, memory, storage speed, and configuration choices, such as the embedding model, chunk size, overlap, and concurrency."}),"\n",(0,t.jsx)(n.p,{children:"This test returned 12 errors, approximately 1.1 percent of the total files ingested.\nAll errors were file-specific, and they didn't stop the pipeline."}),"\n",(0,t.jsxs)(s,{children:[(0,t.jsx)("summary",{children:"Ingestion performance test details"}),(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Ingestion dataset:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Total files: 1,083 items mounted"}),"\n",(0,t.jsx)(n.li,{children:"Total size on disk: 5,026,474,862 bytes (approximately 5.03 GB)"}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Hardware specifications:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Machine: Apple M4 Pro"}),"\n",(0,t.jsxs)(n.li,{children:["Podman VM:","\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Name: podman-machine-default"}),"\n",(0,t.jsx)(n.li,{children:"Type: applehv"}),"\n",(0,t.jsx)(n.li,{children:"vCPUs: 7"}),"\n",(0,t.jsx)(n.li,{children:"Memory: 8 GiB"}),"\n",(0,t.jsx)(n.li,{children:"Disk size: 100 GiB"}),"\n"]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Test results:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-text",children:"2025-09-24T22:40:45.542190Z /app/src/main.py:231 Ingesting default documents when ready disable_langflow_ingest=False\n2025-09-24T22:40:45.546385Z /app/src/main.py:270 Using Langflow ingestion pipeline for default documents file_count=1082\n...\n2025-09-24T23:19:44.866365Z /app/src/main.py:351 Langflow ingestion completed success_count=1070 error_count=12 total_files=1082\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Elapsed time: Approximately 42 minutes 15 seconds (2,535 seconds)"}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Throughput: Approximately 2.4 documents per second"}),"\n"]}),"\n"]})]}),"\n",(0,t.jsx)(n.h2,{id:"troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"\n",(0,t.jsx)(n.p,{children:"If an ingestion task fails, do the following:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Make sure you are uploading supported file types."}),"\n",(0,t.jsx)(n.li,{children:"Split excessively large files into smaller files before uploading."}),"\n",(0,t.jsx)(n.li,{children:"Remove unusual embedded content, such as videos or animations, before uploading. 
Although Docling can replace some non-text content with placeholders during ingestion, some embedded content might cause errors."}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["If the OpenRAG ",(0,t.jsx)(n.strong,{children:"Chat"})," doesn't seem to use your documents correctly, ",(0,t.jsx)(n.a,{href:"/knowledge#browse-knowledge",children:"browse your knowledge base"})," to confirm that the documents are uploaded in full, and the chunks are correct."]}),"\n",(0,t.jsxs)(n.p,{children:["If the documents are present and well-formed, check your ",(0,t.jsx)(n.a,{href:"/knowledge-filters",children:"knowledge filters"}),".\nIf a global filter is applied, make sure the expected documents are included in the global filter.\nIf the global filter excludes any documents, the agent cannot access those documents unless you apply a chat-level filter or change the global filter."]}),"\n",(0,t.jsx)(n.p,{children:"If text is missing or incorrectly processed, you need to reupload the documents after modifying the ingestion parameters or the documents themselves.\nFor example:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:"Break combined documents into separate files for better metadata context."}),"\n",(0,t.jsxs)(n.li,{children:["Make sure scanned documents are legible enough for extraction, and enable the ",(0,t.jsx)(n.strong,{children:"OCR"})," option. Poorly scanned documents might require additional preparation or rescanning before ingestion."]}),"\n",(0,t.jsxs)(n.li,{children:["Adjust the ",(0,t.jsx)(n.strong,{children:"Chunk Size"})," and ",(0,t.jsx)(n.strong,{children:"Chunk Overlap"})," settings to better suit your documents. Larger chunks provide more context but can include irrelevant information, while smaller chunks yield more precise semantic search but can lack context."]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["For more information about modifying ingestion parameters and flows, see ",(0,t.jsx)(n.a,{href:"/knowledge#knowledge-ingestion-settings",children:"Knowledge ingestion settings"}),"."]}),"\n",(0,t.jsx)(n.h2,{id:"see-also",children:"See also"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/knowledge",children:"Configure knowledge"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/knowledge-filters",children:"Filter knowledge"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/chat",children:"Chat with knowledge"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"})}),"\n"]})]})}function m(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(f,{...e})}):f(e)}},8401:(e,n,s)=>{s.d(n,{Ay:()=>c,RM:()=>r});var o=s(4848),t=s(8453),i=s(7733);const r=[];function l(e){const n={p:"p",strong:"strong",...(0,t.R)(),...e.components};return(0,o.jsxs)(n.p,{children:["When using the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", click ",(0,o.jsx)(i.A,{name:"Plus","aria-hidden":"true"})," in the chat input field to upload a file to the current chat session.\nFiles added this way are processed and made available to the agent for the current conversation only.\nThese files aren't stored in the knowledge base permanently."]})}function c(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(l,{...e})}):l(e)}}}]); \ No newline at end of file diff --git a/assets/js/d0314b07.26db9c14.js b/assets/js/d0314b07.26db9c14.js new file mode 100644 index 00000000..dbb80a6a --- /dev/null +++ 
b/assets/js/d0314b07.26db9c14.js @@ -0,0 +1 @@ +"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[5750],{3656:(e,n,s)=>{s.d(n,{Ay:()=>p,RM:()=>d});var r=s(4848),i=s(8453),t=s(7733),o=s(1470),l=s(9365);function a(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",strong:"strong",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"Using Ollama for your OpenRAG language model provider offers greater flexibility and configuration, but can also be overwhelming to start.\nThese recommendations are a reasonable starting point for users with at least one GPU and experience running LLMs locally."}),"\n",(0,r.jsxs)(n.p,{children:["For best performance, OpenRAG recommends OpenAI's ",(0,r.jsx)(n.code,{children:"gpt-oss:20b"})," language model. However, this model uses 16GB of RAM, so consider using Ollama Cloud or running Ollama on a remote machine."]}),"\n",(0,r.jsxs)(n.p,{children:["For generating embeddings, OpenRAG recommends the ",(0,r.jsx)(n.a,{href:"https://ollama.com/library/nomic-embed-text",children:(0,r.jsx)(n.code,{children:"nomic-embed-text"})})," embedding model, which provides high-quality embeddings optimized for retrieval tasks."]}),"\n",(0,r.jsxs)(n.p,{children:["To run models in ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/cloud",children:(0,r.jsx)(n.strong,{children:"Ollama Cloud"})}),", follow these steps:"]}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Sign in to Ollama Cloud.\nIn a terminal, enter ",(0,r.jsx)(n.code,{children:"ollama signin"})," to connect your local environment with Ollama Cloud."]}),"\n",(0,r.jsxs)(n.li,{children:["To run the model, in Ollama, select the ",(0,r.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model, or run ",(0,r.jsx)(n.code,{children:"ollama run gpt-oss:20b-cloud"})," in a terminal.\nOllama Cloud models are run at the same URL as your local Ollama server at ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),", and automatically offloaded to Ollama's cloud service."]}),"\n",(0,r.jsxs)(n.li,{children:["Connect OpenRAG to the same local Ollama server as you would for local models in onboarding, using the default address of ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the ",(0,r.jsx)(n.strong,{children:"Language model"})," field, select the ",(0,r.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model."]}),"\n"]}),"\n",(0,r.jsx)("br",{}),"\n",(0,r.jsxs)(n.p,{children:["To run models on a ",(0,r.jsx)(n.strong,{children:"remote Ollama server"}),", follow these steps:"]}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsx)(n.li,{children:"Ensure your remote Ollama server is accessible from your OpenRAG instance."}),"\n",(0,r.jsxs)(n.li,{children:["In the ",(0,r.jsx)(n.strong,{children:"Ollama Base URL"})," field, enter your remote Ollama server's base URL, such as ",(0,r.jsx)(n.code,{children:"http://your-remote-server:11434"}),".\nOpenRAG connects to the remote Ollama server and populates the lists with the server's available models."]}),"\n",(0,r.jsxs)(n.li,{children:["Select your ",(0,r.jsx)(n.strong,{children:"Embedding model"})," and ",(0,r.jsx)(n.strong,{children:"Language model"})," from the available options."]}),"\n"]})]})}function c(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(a,{...e})}):a(e)}const d=[{value:"Application onboarding",id:"application-onboarding",level:2}];function h(e){const 
n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,r.jsx)(n.p,{children:"The first time you start OpenRAG, regardless of how you installed it, you must complete application onboarding."}),"\n",(0,r.jsxs)(n.p,{children:["Some of these variables, such as the embedding models, can be changed seamlessly after onboarding.\nOthers are immutable and require you to destroy and recreate the OpenRAG containers.\nFor more information, see ",(0,r.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embeddings model.\nAdditionally, you can set multiple embedding models."}),"\n",(0,r.jsx)(n.p,{children:"You only need to complete onboarding for your preferred providers."}),"\n",(0,r.jsxs)(o.A,{groupId:"Provider",children:[(0,r.jsxs)(l.A,{value:"Anthropic",label:"Anthropic",default:!0,children:[(0,r.jsx)(n.admonition,{type:"info",children:(0,r.jsx)(n.p,{children:"Anthropic doesn't provide embedding models. If you select Anthropic for your language model, you must select a different provider for embeddings."})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Use environment Anthropic API key"})," to automatically use your key from the ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an Anthropic API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,r.jsx)(t.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]}),(0,r.jsx)(l.A,{value:"OpenAI",label:"OpenAI",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,r.jsx)(n.strong,{children:"Embedding 
Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,r.jsx)(t.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsx)(l.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Complete the fields for ",(0,r.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", ",(0,r.jsx)(n.strong,{children:"IBM Project ID"}),", and ",(0,r.jsx)(n.strong,{children:"IBM API key"}),".\nThese values are found in your IBM watsonx deployment."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,r.jsx)(t.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsxs)(l.A,{value:"Ollama",label:"Ollama",children:[(0,r.jsx)(n.admonition,{type:"info",children:(0,r.jsxs)(n.p,{children:["Ollama isn't installed with OpenRAG. 
To install Ollama, see the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["To connect to an Ollama server running on your local machine, enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG connects to the Ollama server and populates the model lists with the server's available models."]}),"\n",(0,r.jsxs)(n.li,{children:["Select the ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.","\n",(0,r.jsxs)(s,{closed:!0,children:[(0,r.jsx)("summary",{children:"Ollama model selection and external server configuration"}),(0,r.jsx)(c,{})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function p(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(h,{...e})}):h(e)}},3862:(e,n,s)=>{s.d(n,{Ay:()=>l,RM:()=>t});var r=s(4848),i=s(8453);const t=[];function o(e){const n={a:"a",admonition:"admonition",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install",children:"Install WSL"})," with the Ubuntu distribution using WSL 2:"]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-powershell",children:"wsl --install -d Ubuntu\n"})}),"\n",(0,r.jsxs)(n.p,{children:["For new installations, the ",(0,r.jsx)(n.code,{children:"wsl --install"})," command uses WSL 2 and Ubuntu by default."]}),"\n",(0,r.jsxs)(n.p,{children:["For existing WSL installations, you can ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#change-the-default-linux-distribution-installed",children:"change the distribution"})," and ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#upgrade-version-from-wsl-1-to-wsl-2",children:"check the WSL version"}),"."]}),"\n",(0,r.jsx)(n.admonition,{title:"Known limitation",type:"warning",children:(0,r.jsx)(n.p,{children:"OpenRAG isn't compatible with nested virtualization, which can cause networking issues.\nDon't install OpenRAG on a WSL distribution that is installed inside a Windows VM.\nInstead, install OpenRAG on your base OS or a non-nested Linux VM."})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#ways-to-run-multiple-linux-distributions-with-wsl",children:"Start your WSL Ubuntu distribution"})," if it doesn't start automatically."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/setup/environment#set-up-your-linux-username-and-password",children:"Set up a username and password for your WSL 
distribution"}),"."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-containers",children:"Install Docker Desktop for Windows with WSL 2"}),". When you reach the Docker Desktop ",(0,r.jsx)(n.strong,{children:"WSL integration"})," settings, make sure your Ubuntu distribution is enabled, and then click ",(0,r.jsx)(n.strong,{children:"Apply & Restart"})," to enable Docker support in WSL."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Install and run OpenRAG from within your WSL Ubuntu distribution."}),"\n"]}),"\n"]}),"\n",(0,r.jsx)("br",{}),"\n",(0,r.jsxs)(n.p,{children:["If you encounter issues with port forwarding or the Windows Firewall, you might need to adjust the ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/security/operating-system-security/network-security/windows-firewall/hyper-v-firewall",children:"Hyper-V firewall settings"})," to allow communication between your WSL distribution and the Windows host. For more troubleshooting advice for networking issues, see ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/troubleshooting#common-issues",children:"Troubleshooting WSL common issues"}),"."]})]})}function l(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(o,{...e})}):o(e)}},4398:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>p,contentTitle:()=>h,default:()=>j,frontMatter:()=>d,metadata:()=>r,toc:()=>u});const r=JSON.parse('{"id":"get-started/install","title":"Install OpenRAG with TUI","description":"Install OpenRAG and then run the OpenRAG Terminal User Interface(TUI) to start your OpenRAG deployment with a guided setup process.","source":"@site/docs/get-started/install.mdx","sourceDirName":"get-started","slug":"/install","permalink":"/install","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/install.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG with TUI","slug":"/install"},"sidebar":"tutorialSidebar","previous":{"title":"Quickstart","permalink":"/quickstart"},"next":{"title":"Install OpenRAG containers","permalink":"/docker"}}');var i=s(4848),t=s(8453),o=s(1470),l=s(9365),a=s(3656),c=s(3862);const d={title:"Install OpenRAG with TUI",slug:"/install"},h=void 0,p={},u=[{value:"Prerequisites",id:"prerequisites",level:2},...c.RM,{value:"Install OpenRAG",id:"install",level:2},{value:"Set up OpenRAG with the TUI",id:"setup",level:2},...a.RM,{value:"Exit the OpenRAG TUI",id:"exit-the-openrag-tui",level:2},{value:"Manage OpenRAG containers with the TUI",id:"tui-container-management",level:2},{value:"Diagnostics",id:"diagnostics",level:3},{value:"Status",id:"status",level:3},{value:"Reset containers",id:"reset-containers",level:3},{value:"Start all services",id:"start-all-services",level:3},{value:"Start containers",id:"start-containers",level:4},{value:"Start native services (Docling)",id:"start-native-services-docling",level:4},{value:"Upgrade OpenRAG",id:"upgrade",level:2},{value:"Reinstall OpenRAG",id:"reinstall",level:2}];function x(e){const n={a:"a",admonition:"admonition",code:"code",em:"em",h2:"h2",h3:"h3",h4:"h4",img:"img",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,t.R)(),...e.components},{Details:r}=n;return r||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide 
it.")}("Details",!0),(0,i.jsxs)(i.Fragment,{children:[(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"#install",children:"Install OpenRAG"})," and then run the ",(0,i.jsx)(n.a,{href:"#setup",children:"OpenRAG Terminal User Interface(TUI)"})," to start your OpenRAG deployment with a guided setup process."]}),"\n",(0,i.jsx)(n.p,{children:"The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal."}),"\n",(0,i.jsx)(n.p,{children:(0,i.jsx)(n.img,{alt:"OpenRAG TUI Interface",src:s(5689).A+"",width:"1995",height:"1099"})}),"\n",(0,i.jsxs)(n.p,{children:["Instead of starting OpenRAG using Docker commands and manually editing values in the ",(0,i.jsx)(n.code,{children:".env"})," file, the TUI walks you through the setup. It prompts for variables where required, creates a ",(0,i.jsx)(n.code,{children:".env"})," file for you, and then starts OpenRAG."]}),"\n",(0,i.jsx)(n.p,{children:"Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs."}),"\n",(0,i.jsxs)(n.p,{children:["If you prefer running Podman or Docker containers and manually editing ",(0,i.jsx)(n.code,{children:".env"})," files, see ",(0,i.jsx)(n.a,{href:"/docker",children:"Install OpenRAG Containers"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["All OpenRAG installations require ",(0,i.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python"})," version 3.13 or later."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"If you aren't using the automatic installer script, install the following:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:(0,i.jsx)(n.code,{children:"podman-compose"})})," or ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". 
To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Microsoft Windows only: To run OpenRAG on Windows, you must use the Windows Subsystem for Linux (WSL)."}),"\n",(0,i.jsxs)(r,{children:[(0,i.jsx)("summary",{children:"Install WSL for OpenRAG"}),(0,i.jsx)(c.Ay,{})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Prepare model providers and credentials."}),"\n",(0,i.jsxs)(n.p,{children:["During ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),", you must select language model and embedding model providers.\nIf your chosen provider offers both types, you can use the same provider for both selections.\nIf your provider offers only one type, such as Anthropic, you must select two providers."]}),"\n",(0,i.jsx)(n.p,{children:"Gather the credentials and connection details for your chosen model providers before starting onboarding:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["OpenAI: Create an ",(0,i.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Anthropic language models: Create an ",(0,i.jsx)(n.a,{href:"https://www.anthropic.com/docs/api/reference",children:"Anthropic API key"}),"."]}),"\n",(0,i.jsx)(n.li,{children:"IBM watsonx.ai: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment."}),"\n",(0,i.jsxs)(n.li,{children:["Ollama: Use the ",(0,i.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"})," to set up your Ollama instance locally, in the cloud, or on a remote server, and then get your Ollama server's base URL."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,i.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. 
If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"install",children:"Install OpenRAG"}),"\n",(0,i.jsx)(n.p,{children:"Choose an installation method based on your needs:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:"For new users, the automatic installer script detects and installs prerequisites and then runs OpenRAG."}),"\n",(0,i.jsxs)(n.li,{children:["For a quick test, use ",(0,i.jsx)(n.code,{children:"uvx"})," to run OpenRAG without creating a project or modifying files."]}),"\n",(0,i.jsxs)(n.li,{children:["Use ",(0,i.jsx)(n.code,{children:"uv add"})," to install OpenRAG as a managed dependency in a new or existing Python project."]}),"\n",(0,i.jsxs)(n.li,{children:["Use ",(0,i.jsx)(n.code,{children:"uv pip install"})," to install OpenRAG into an existing virtual environment."]}),"\n"]}),"\n",(0,i.jsxs)(o.A,{groupId:"Installation method",children:[(0,i.jsxs)(l.A,{value:"installer",label:"Automatic installer",default:!0,children:[(0,i.jsxs)(n.p,{children:["The script detects and installs uv, Docker/Podman, and Docker Compose prerequisites, then runs OpenRAG with ",(0,i.jsx)(n.code,{children:"uvx"}),"."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Create a directory to store the OpenRAG configuration files:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"mkdir openrag-workspace\ncd openrag-workspace\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run the installer:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"curl -fsSL https://docs.openr.ag/files/run_openrag_with_prereqs.sh | bash\n"})}),"\n"]}),"\n"]}),(0,i.jsxs)(n.p,{children:["The TUI creates a ",(0,i.jsx)(n.code,{children:".env"})," file and docker-compose files in the current working directory."]})]}),(0,i.jsxs)(l.A,{value:"uvx",label:"Quick test with uvx",children:[(0,i.jsxs)(n.p,{children:["Use ",(0,i.jsx)(n.code,{children:"uvx"})," to quickly run OpenRAG without creating a project or modifying any files."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Create a directory to store the OpenRAG configuration files:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"mkdir openrag-workspace\ncd openrag-workspace\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To run a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx --from openrag==0.1.30 openrag\n"})}),"\n"]}),"\n"]}),(0,i.jsxs)(n.p,{children:["The TUI creates a ",(0,i.jsx)(n.code,{children:".env"})," file and docker-compose files in the current working directory."]})]}),(0,i.jsxs)(l.A,{value:"uv-add",label:"Python project with uv add",children:[(0,i.jsxs)(n.p,{children:["Use ",(0,i.jsx)(n.code,{children:"uv add"})," to install OpenRAG as a dependency in your Python project. 
This adds OpenRAG to your ",(0,i.jsx)(n.code,{children:"pyproject.toml"})," and lockfile, making your installation reproducible and version-controlled."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Create a new project with a virtual environment:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv init YOUR_PROJECT_NAME\ncd YOUR_PROJECT_NAME\n"})}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"(venv)"})," prompt doesn't change, but ",(0,i.jsx)(n.code,{children:"uv"})," commands will automatically use the project's virtual environment."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Add OpenRAG to your project:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To add a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add openrag==0.1.30\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start the OpenRAG TUI:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]}),(0,i.jsxs)(r,{closed:!0,children:[(0,i.jsx)("summary",{children:"Install a local wheel"}),(0,i.jsx)(n.p,{children:"If you downloaded the OpenRAG wheel to your local machine, install it by specifying its path:"}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Add the wheel to your project:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add PATH/TO/openrag-VERSION-py3-none-any.whl\n"})}),"\n",(0,i.jsxs)(n.p,{children:["Replace ",(0,i.jsx)(n.code,{children:"PATH/TO/"})," and ",(0,i.jsx)(n.code,{children:"VERSION"})," with the path and version of your downloaded OpenRAG ",(0,i.jsx)(n.code,{children:".whl"})," file."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]})]}),(0,i.jsxs)(l.A,{value:"uv-pip",label:"Existing virtual environment with uv pip install",children:[(0,i.jsxs)(n.p,{children:["Use ",(0,i.jsx)(n.code,{children:"uv pip install"})," to install OpenRAG into an existing virtual environment that isn't managed by ",(0,i.jsx)(n.code,{children:"uv"}),"."]}),(0,i.jsx)(n.admonition,{type:"tip",children:(0,i.jsxs)(n.p,{children:["For new projects, ",(0,i.jsx)(n.code,{children:"uv add"})," is recommended as it manages dependencies in your project's lockfile."]})}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Activate your virtual environment."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Install OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv pip install openrag\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]})]}),"\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#setup",children:"Set up OpenRAG with the TUI"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["If you encounter errors during installation, see ",(0,i.jsx)(n.a,{href:"/support/troubleshoot",children:"Troubleshoot 
OpenRAG"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"setup",children:"Set up OpenRAG with the TUI"}),"\n",(0,i.jsxs)(n.p,{children:["The OpenRAG setup process creates a ",(0,i.jsx)(n.code,{children:".env"})," file at the root of your OpenRAG directory, and then starts OpenRAG.\nIf it detects a ",(0,i.jsx)(n.code,{children:".env"})," file in the OpenRAG root directory, it sources any variables from the ",(0,i.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,i.jsxs)(n.p,{children:["The TUI offers two setup methods to populate the required values. ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," can generate all minimum required values for OpenRAG. However, ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," doesn't enable ",(0,i.jsx)(n.a,{href:"/knowledge#auth",children:"OAuth connectors for cloud storage"}),". If you want to use OAuth connectors to upload documents from cloud storage, select ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),".\nIf OpenRAG detects OAuth credentials, it recommends ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),"."]}),"\n",(0,i.jsxs)(o.A,{groupId:"Setup method",children:[(0,i.jsx)(l.A,{value:"Basic setup",label:"Basic setup",default:!0,children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Basic Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," or press ",(0,i.jsx)("kbd",{children:"1"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n",(0,i.jsxs)(n.p,{children:["The OpenSearch password is required. The Langflow admin password is optional.\nIf no Langflow admin password is generated, Langflow runs in ",(0,i.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login",children:"autologin mode"})," with no password required."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Optional: Paste your OpenAI API key in the OpenAI API key field. 
You can also provide this during onboarding or choose a different model provider."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),".\nYour passwords are saved in the ",(0,i.jsx)(n.code,{children:".env"})," file used to start OpenRAG."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start the Docling service, under ",(0,i.jsx)(n.strong,{children:"Native Services"}),", click ",(0,i.jsx)(n.strong,{children:"Start"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To open the OpenRAG application, navigate to the TUI main menu, and then click ",(0,i.jsx)(n.strong,{children:"Open App"}),".\nAlternatively, in your browser, navigate to ",(0,i.jsx)(n.code,{children:"localhost:3000"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]})}),(0,i.jsx)(l.A,{value:"Advanced setup",label:"Advanced setup",children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," or press ",(0,i.jsx)("kbd",{children:"2"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n",(0,i.jsxs)(n.p,{children:["The OpenSearch password is required. The Langflow admin password is optional.\nIf no Langflow admin password is generated, Langflow runs in ",(0,i.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login",children:"autologin mode"})," with no password required."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Paste your OpenAI API key in the OpenAI API key field."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["If you want to upload documents from external storage, such as Google Drive, add the required OAuth credentials for the connectors that you want to use. These settings can be populated automatically if OpenRAG detects these credentials in a ",(0,i.jsx)(n.code,{children:".env"})," file in the OpenRAG installation directory."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on ",(0,i.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. 
You can generate these in the ",(0,i.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". For more information, see the ",(0,i.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". For more information, see the ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:["You can ",(0,i.jsx)(n.a,{href:"/ingestion#oauth-ingestion",children:"manage OAuth credentials"})," later, but it is recommended to configure them during initial set up."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"The OpenRAG TUI presents redirect URIs for your OAuth app.\nThese are the URLs your OAuth provider will redirect back to after user sign-in.\nRegister these redirect values with your OAuth provider as they are presented in the TUI."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start the Docling service, under ",(0,i.jsx)(n.strong,{children:"Native Services"}),", click ",(0,i.jsx)(n.strong,{children:"Start"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To open the OpenRAG application, navigate to the TUI main menu, and then click ",(0,i.jsx)(n.strong,{children:"Open App"}),".\nAlternatively, in your browser, navigate to ",(0,i.jsx)(n.code,{children:"localhost:3000"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"If you enabled OAuth connectors, you must sign in to your OAuth provider before being redirected to your OpenRAG instance."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Two additional variables are available for ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," at this point.\nOnly change these variables if you have a non-default network configuration for your deployment, such as using a reverse proxy or custom domain."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.code,{children:"LANGFLOW_PUBLIC_URL"}),": Sets the base address to access the Langflow web interface. 
This is where users interact with flows in a browser."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.code,{children:"WEBHOOK_BASE_URL"}),": Sets the base address of the OpenRAG OAuth connector endpoint.\nSupported webhook endpoints:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:"Amazon S3: Not applicable."}),"\n",(0,i.jsxs)(n.li,{children:["Google Drive: ",(0,i.jsx)(n.code,{children:"/connectors/google_drive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["OneDrive: ",(0,i.jsx)(n.code,{children:"/connectors/onedrive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["SharePoint: ",(0,i.jsx)(n.code,{children:"/connectors/sharepoint/webhook"})]}),"\n"]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]})})]}),"\n",(0,i.jsx)(a.Ay,{}),"\n",(0,i.jsx)(n.h2,{id:"exit-the-openrag-tui",children:"Exit the OpenRAG TUI"}),"\n",(0,i.jsxs)(n.p,{children:["To exit the OpenRAG TUI, navigate to the main menu, and then press ",(0,i.jsx)("kbd",{children:"q"}),".\nThe OpenRAG containers continue to run until they are stopped.\nFor more information, see ",(0,i.jsx)(n.a,{href:"#tui-container-management",children:"Manage OpenRAG containers with the TUI "}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To relaunch the TUI, run ",(0,i.jsx)(n.code,{children:"uv run openrag"}),".\nIf you installed OpenRAG with ",(0,i.jsx)(n.code,{children:"uvx"}),", run ",(0,i.jsx)(n.code,{children:"uvx openrag"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"tui-container-management",children:"Manage OpenRAG containers with the TUI"}),"\n",(0,i.jsx)(n.p,{children:"After installation, the TUI can deploy, manage, and upgrade your OpenRAG containers."}),"\n",(0,i.jsx)(n.h3,{id:"diagnostics",children:"Diagnostics"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Diagnostics"})," menu provides health monitoring for your container runtimes and monitoring of your OpenSearch security."]}),"\n",(0,i.jsx)(n.h3,{id:"status",children:"Status"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Status"})," menu displays information on your container deployment.\nHere you can check container health, find your service ports, view logs, and upgrade your containers."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Logs"}),": To view streaming logs, select the container you want to view, and press ",(0,i.jsx)("kbd",{children:"l"}),".\nTo copy the logs, click ",(0,i.jsx)(n.strong,{children:"Copy to Clipboard"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Upgrade"}),": Check for updates. 
For more information, see ",(0,i.jsx)(n.a,{href:"#upgrade",children:"upgrade OpenRAG"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Factory Reset"}),": This is a destructive action that ",(0,i.jsx)(n.a,{href:"#reset-containers",children:"resets your containers"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Native services"}),": ",(0,i.jsx)(n.a,{href:"#start-all-services",children:"View and manage OpenRAG services"})," that run directly on your local machine instead of a container."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h3,{id:"reset-containers",children:"Reset containers"}),"\n",(0,i.jsx)(n.p,{children:"Reset your OpenRAG deployment by recreating the containers and removing some related data."}),"\n",(0,i.jsxs)(n.admonition,{type:"warning",children:[(0,i.jsx)(n.p,{children:"This is a destructive action that destroys the following:"}),(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:"All OpenRAG containers, volumes, and local images"}),"\n",(0,i.jsx)(n.li,{children:"Any additional Docker objects"}),"\n",(0,i.jsxs)(n.li,{children:["The contents of OpenRAG's ",(0,i.jsx)(n.code,{children:"config"})," and ",(0,i.jsx)(n.code,{children:"./opensearch-data"})," directories"]}),"\n",(0,i.jsxs)(n.li,{children:["The ",(0,i.jsx)(n.code,{children:"conversations.json"})," file"]}),"\n"]}),(0,i.jsxs)(n.p,{children:["This operation ",(0,i.jsx)(n.em,{children:"doesn't"})," remove the ",(0,i.jsx)(n.code,{children:".env"})," file or the contents of the ",(0,i.jsx)(n.code,{children:"./openrag-documents"})," directory."]})]}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To destroy and recreate your OpenRAG containers, go to the TUI ",(0,i.jsxs)(n.a,{href:"#status",children:[(0,i.jsx)(n.strong,{children:"Status"})," menu"]}),", and then click ",(0,i.jsx)(n.strong,{children:"Factory Reset"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["This function runs the following commands ",(0,i.jsx)(n.em,{children:"and"})," deletes the contents of OpenRAG's ",(0,i.jsx)(n.code,{children:"config"})," and ",(0,i.jsx)(n.code,{children:"./opensearch-data"})," directories."]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"docker compose down --volumes --remove-orphans --rmi local\ndocker system prune -f\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["If you reset your containers as part of reinstalling OpenRAG, continue the ",(0,i.jsx)(n.a,{href:"#reinstall",children:"reinstallation process"})," after resetting the containers.\nOtherwise, in the TUI ",(0,i.jsx)(n.strong,{children:"Setup"})," menu, repeat the ",(0,i.jsx)(n.a,{href:"#setup",children:"setup process"})," to start the services and launch the OpenRAG app. 
Your OpenRAG passwords, OAuth credentials (if previously set), and onboarding configuration are restored from the ",(0,i.jsx)(n.code,{children:".env"})," file."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h3,{id:"start-all-services",children:"Start all services"}),"\n",(0,i.jsx)(n.p,{children:"Through the TUI, you can view and manage OpenRAG services that run in containers and directly on your local machine."}),"\n",(0,i.jsx)(n.h4,{id:"start-containers",children:"Start containers"}),"\n",(0,i.jsxs)(n.p,{children:["On the TUI main page or the ",(0,i.jsx)(n.strong,{children:"Setup"})," menu, click ",(0,i.jsx)(n.strong,{children:"Start All Services"})," to start the OpenRAG containers and launch OpenRAG itself."]}),"\n",(0,i.jsx)(n.p,{children:"When you start all services, the following processes happen:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["OpenRAG automatically detects your container runtime, and then checks if your machine has compatible GPU support by checking for ",(0,i.jsx)(n.code,{children:"CUDA"}),", ",(0,i.jsx)(n.code,{children:"NVIDIA_SMI"}),", and Docker/Podman runtime support. This check determines which Docker Compose file OpenRAG uses."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["OpenRAG pulls the OpenRAG container images with ",(0,i.jsx)(n.code,{children:"docker compose pull"})," if any images are missing."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["OpenRAG deploys the containers with ",(0,i.jsx)(n.code,{children:"docker compose up -d"}),"."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h4,{id:"start-native-services-docling",children:"Start native services (Docling)"}),"\n",(0,i.jsxs)(n.p,{children:["A ",(0,i.jsx)(n.em,{children:"native service"})," in OpenRAG is a service that runs locally on your machine, not within a container. 
For example, the ",(0,i.jsx)(n.code,{children:"docling serve"})," process is an OpenRAG native service because this document processing service runs on your local machine, separate from the OpenRAG containers."]}),"\n",(0,i.jsxs)(n.p,{children:["From the ",(0,i.jsx)(n.strong,{children:"Status"})," menu, you can view the status, port, and process ID (PID) of the OpenRAG native services.\nYou can also click ",(0,i.jsx)(n.strong,{children:"Stop"})," or ",(0,i.jsx)(n.strong,{children:"Restart"})," to stop and start OpenRAG native services."]}),"\n",(0,i.jsx)(n.h2,{id:"upgrade",children:"Upgrade OpenRAG"}),"\n",(0,i.jsx)(n.p,{children:"To upgrade OpenRAG, upgrade the OpenRAG Python package, and then upgrade the OpenRAG containers."}),"\n",(0,i.jsxs)(n.p,{children:["This is a two part process because upgrading the OpenRAG Python package updates the TUI and Python code, but the container versions are controlled by environment variables in your ",(0,i.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Stop your OpenRAG containers: In the OpenRAG TUI, go to the ",(0,i.jsx)(n.strong,{children:"Status"})," menu, and then click ",(0,i.jsx)(n.strong,{children:"Stop Services"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Upgrade the OpenRAG Python package to the latest version from ",(0,i.jsx)(n.a,{href:"https://pypi.org/project/openrag/",children:"PyPI"}),"."]}),"\n",(0,i.jsxs)(o.A,{groupId:"Installation method",children:[(0,i.jsxs)(l.A,{value:"installer",label:"Automatic installer or uvx",default:!0,children:[(0,i.jsxs)(n.p,{children:["Use these steps to upgrade the Python package if you installed OpenRAG using the automatic installer or ",(0,i.jsx)(n.code,{children:"uvx"}),":"]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Navigate to your OpenRAG workspace directory:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"cd openrag-workspace\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Upgrade the OpenRAG package:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx --from openrag openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To upgrade to a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx --from openrag==0.1.33 openrag\n"})}),"\n"]}),"\n"]})]}),(0,i.jsxs)(l.A,{value:"uv-add",label:"Python project (uv add)",children:[(0,i.jsxs)(n.p,{children:["Use these steps to upgrade the Python package if you installed OpenRAG in a Python project with ",(0,i.jsx)(n.code,{children:"uv add"}),":"]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Navigate to your project directory:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"cd YOUR_PROJECT_NAME\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Update OpenRAG to the latest version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add --upgrade openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To upgrade to a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add --upgrade openrag==0.1.33\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start the OpenRAG 
TUI:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]}),(0,i.jsxs)(l.A,{value:"uv-pip",label:"Virtual environment (uv pip install)",children:[(0,i.jsxs)(n.p,{children:["Use these steps to upgrade the Python package if you installed OpenRAG in a venv with ",(0,i.jsx)(n.code,{children:"uv pip install"}),":"]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Activate your virtual environment."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Upgrade OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv pip install --upgrade openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To upgrade to a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv pip install --upgrade openrag==0.1.33\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start the OpenRAG TUI:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Start the upgraded OpenRAG containers: In the OpenRAG TUI, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),", and then wait while the containers start."]}),"\n",(0,i.jsxs)(n.p,{children:["After upgrading the Python package, OpenRAG runs ",(0,i.jsx)(n.code,{children:"docker compose pull"})," to get the appropriate container images matching the version specified in your OpenRAG ",(0,i.jsx)(n.code,{children:".env"})," file. Then, it recreates the containers with the new images using ",(0,i.jsx)(n.code,{children:"docker compose up -d --force-recreate"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["In the ",(0,i.jsx)(n.code,{children:".env"})," file, the ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION"})," ",(0,i.jsx)(n.a,{href:"/reference/configuration#system-settings",children:"environment variable"})," is set to ",(0,i.jsx)(n.code,{children:"latest"})," by default, which pulls the ",(0,i.jsx)(n.code,{children:"latest"})," available container images.\nTo pin a specific container image version, you can set ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION"})," to the desired container image version, such as ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION=0.1.33"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["However, when you upgrade the Python package, OpenRAG automatically attempts to keep the ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION"})," synchronized with the Python package version.\nYou might need to edit the ",(0,i.jsx)(n.code,{children:".env"})," file after upgrading the Python package to enforce a different container version.\nThe TUI warns you if it detects a version mismatch."]}),"\n",(0,i.jsxs)(n.p,{children:["If you get a ",(0,i.jsx)(n.code,{children:"langflow container already exists"})," error during upgrade, see ",(0,i.jsx)(n.a,{href:"/support/troubleshoot#langflow-container-already-exists-during-upgrade",children:"Langflow container already exists during upgrade"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["When the upgrade process is complete, you can close the ",(0,i.jsx)(n.strong,{children:"Status"})," window and continue using OpenRAG."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"reinstall",children:"Reinstall OpenRAG"}),"\n",(0,i.jsx)(n.p,{children:"Reset your OpenRAG deployment by recreating the containers and, optionally, 
removing related data:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["In the TUI, ",(0,i.jsx)(n.a,{href:"#reset-containers",children:"reset your containers"})," to destroy the following:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:"All existing OpenRAG containers, volumes, and local images"}),"\n",(0,i.jsx)(n.li,{children:"Any additional Docker objects"}),"\n",(0,i.jsxs)(n.li,{children:["The contents of OpenRAG's ",(0,i.jsx)(n.code,{children:"config"})," and ",(0,i.jsx)(n.code,{children:"./opensearch-data"})," directories"]}),"\n",(0,i.jsxs)(n.li,{children:["The ",(0,i.jsx)(n.code,{children:"conversations.json"})," file"]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Optional: Remove data that wasn't deleted by the ",(0,i.jsx)(n.strong,{children:"Factory Reset"})," operation. For a completely fresh installation, delete all of this data."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsxs)(n.strong,{children:["OpenRAG's ",(0,i.jsx)(n.code,{children:".env"})," file"]}),": Contains your OpenRAG configuration, including OpenRAG passwords, API keys, OAuth settings, and other ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"environment variables"}),". If you delete this file, you must either repeat the ",(0,i.jsx)(n.a,{href:"#setup",children:"setup process"})," to create a new ",(0,i.jsx)(n.code,{children:".env"})," file, or add a populated ",(0,i.jsx)(n.code,{children:".env"})," file to your OpenRAG installation directory before restarting OpenRAG."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsxs)(n.strong,{children:["The contents of the ",(0,i.jsx)(n.code,{children:"./openrag-documents"})," directory"]}),": Contains documents that you uploaded to OpenRAG. Delete these files to prevent documents from being reingested to your knowledge base after restarting OpenRAG. 
However, you might want to preserve OpenRAG's ",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/tree/main/openrag-documents",children:"default documents"}),"."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["In the TUI ",(0,i.jsx)(n.strong,{children:"Setup"})," menu, repeat the ",(0,i.jsx)(n.a,{href:"#setup",children:"setup process"})," to configure OpenRAG, restart the services, and launch the OpenRAG app, and repeat ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),".\nIf OpenRAG detects a ",(0,i.jsx)(n.code,{children:".env"})," file, it automatically populates any OpenRAG passwords, OAuth credentials, and onboarding configuration set in that file."]}),"\n"]}),"\n"]})]})}function j(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(x,{...e})}):x(e)}},5689:(e,n,s)=>{s.d(n,{A:()=>r});const r=s.p+"assets/images/OpenRAG_TUI_2025-09-10T13_04_11_757637-9441c53ba39162a88ac6c11cbeaed0e0.svg"}}]); \ No newline at end of file diff --git a/assets/js/d0314b07.86b4cd81.js b/assets/js/d0314b07.86b4cd81.js deleted file mode 100644 index 6eb872d5..00000000 --- a/assets/js/d0314b07.86b4cd81.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[5750],{3656:(e,n,s)=>{s.d(n,{Ay:()=>p,RM:()=>d});var r=s(4848),i=s(8453),t=s(7733),o=s(1470),l=s(9365);function a(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",strong:"strong",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.p,{children:"Using Ollama for your OpenRAG language model provider offers greater flexibility and configuration, but can also be overwhelming to start.\nThese recommendations are a reasonable starting point for users with at least one GPU and experience running LLMs locally."}),"\n",(0,r.jsxs)(n.p,{children:["For best performance, OpenRAG recommends OpenAI's ",(0,r.jsx)(n.code,{children:"gpt-oss:20b"})," language model. 
However, this model uses 16GB of RAM, so consider using Ollama Cloud or running Ollama on a remote machine."]}),"\n",(0,r.jsxs)(n.p,{children:["For generating embeddings, OpenRAG recommends the ",(0,r.jsx)(n.a,{href:"https://ollama.com/library/nomic-embed-text",children:(0,r.jsx)(n.code,{children:"nomic-embed-text"})})," embedding model, which provides high-quality embeddings optimized for retrieval tasks."]}),"\n",(0,r.jsxs)(n.p,{children:["To run models in ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/cloud",children:(0,r.jsx)(n.strong,{children:"Ollama Cloud"})}),", follow these steps:"]}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Sign in to Ollama Cloud.\nIn a terminal, enter ",(0,r.jsx)(n.code,{children:"ollama signin"})," to connect your local environment with Ollama Cloud."]}),"\n",(0,r.jsxs)(n.li,{children:["To run the model, in Ollama, select the ",(0,r.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model, or run ",(0,r.jsx)(n.code,{children:"ollama run gpt-oss:20b-cloud"})," in a terminal.\nOllama Cloud models are run at the same URL as your local Ollama server at ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),", and automatically offloaded to Ollama's cloud service."]}),"\n",(0,r.jsxs)(n.li,{children:["Connect OpenRAG to the same local Ollama server as you would for local models in onboarding, using the default address of ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the ",(0,r.jsx)(n.strong,{children:"Language model"})," field, select the ",(0,r.jsx)(n.code,{children:"gpt-oss:20b-cloud"})," model."]}),"\n"]}),"\n",(0,r.jsx)("br",{}),"\n",(0,r.jsxs)(n.p,{children:["To run models on a ",(0,r.jsx)(n.strong,{children:"remote Ollama server"}),", follow these steps:"]}),"\n",(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsx)(n.li,{children:"Ensure your remote Ollama server is accessible from your OpenRAG instance."}),"\n",(0,r.jsxs)(n.li,{children:["In the ",(0,r.jsx)(n.strong,{children:"Ollama Base URL"})," field, enter your remote Ollama server's base URL, such as ",(0,r.jsx)(n.code,{children:"http://your-remote-server:11434"}),".\nOpenRAG connects to the remote Ollama server and populates the lists with the server's available models."]}),"\n",(0,r.jsxs)(n.li,{children:["Select your ",(0,r.jsx)(n.strong,{children:"Embedding model"})," and ",(0,r.jsx)(n.strong,{children:"Language model"})," from the available options."]}),"\n"]})]})}function c(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(a,{...e})}):a(e)}const d=[{value:"Application onboarding",id:"application-onboarding",level:2}];function h(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",...(0,i.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,r.jsxs)(r.Fragment,{children:[(0,r.jsx)(n.h2,{id:"application-onboarding",children:"Application onboarding"}),"\n",(0,r.jsx)(n.p,{children:"The first time you start OpenRAG, regardless of how you installed it, you must complete application onboarding."}),"\n",(0,r.jsxs)(n.p,{children:["Some of these variables, such as the embedding models, can be changed seamlessly after onboarding.\nOthers are immutable and require you to destroy and recreate the OpenRAG containers.\nFor more information, see 
",(0,r.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"}),"."]}),"\n",(0,r.jsx)(n.p,{children:"You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embeddings model.\nAdditionally, you can set multiple embedding models."}),"\n",(0,r.jsx)(n.p,{children:"You only need to complete onboarding for your preferred providers."}),"\n",(0,r.jsxs)(o.A,{groupId:"Provider",children:[(0,r.jsxs)(l.A,{value:"Anthropic",label:"Anthropic",default:!0,children:[(0,r.jsx)(n.admonition,{type:"info",children:(0,r.jsx)(n.p,{children:"Anthropic doesn't provide embedding models. If you select Anthropic for your language model, you must select a different provider for embeddings."})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Use environment Anthropic API key"})," to automatically use your key from the ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an Anthropic API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,r.jsx)(t.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]}),(0,r.jsx)(l.A,{value:"OpenAI",label:"OpenAI",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Enable ",(0,r.jsx)(n.strong,{children:"Get API key from environment variable"})," to automatically enter your key from the TUI-generated ",(0,r.jsx)(n.code,{children:".env"})," file.\nAlternatively, paste an OpenAI API key into the field."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,r.jsx)(t.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsx)(l.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["Complete the fields for ",(0,r.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", ",(0,r.jsx)(n.strong,{children:"IBM Project ID"}),", and ",(0,r.jsx)(n.strong,{children:"IBM API key"}),".\nThese values are found in your IBM 
watsonx deployment."]}),"\n",(0,r.jsxs)(n.li,{children:["Under ",(0,r.jsx)(n.strong,{children:"Advanced settings"}),", select your ",(0,r.jsx)(n.strong,{children:"Language Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["In the second onboarding panel, select a provider for embeddings and select your ",(0,r.jsx)(n.strong,{children:"Embedding Model"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),".\nAlternatively, click ",(0,r.jsx)(t.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,r.jsx)(n.strong,{children:"Skip overview"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})}),(0,r.jsxs)(l.A,{value:"Ollama",label:"Ollama",children:[(0,r.jsx)(n.admonition,{type:"info",children:(0,r.jsxs)(n.p,{children:["Ollama isn't installed with OpenRAG. To install Ollama, see the ",(0,r.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"}),"."]})}),(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["To connect to an Ollama server running on your local machine, enter your Ollama server's base URL address.\nThe default Ollama server address is ",(0,r.jsx)(n.code,{children:"http://localhost:11434"}),".\nOpenRAG connects to the Ollama server and populates the model lists with the server's available models."]}),"\n",(0,r.jsxs)(n.li,{children:["Select the ",(0,r.jsx)(n.strong,{children:"Embedding Model"})," and ",(0,r.jsx)(n.strong,{children:"Language Model"})," your Ollama server is running.","\n",(0,r.jsxs)(s,{closed:!0,children:[(0,r.jsx)("summary",{children:"Ollama model selection and external server configuration"}),(0,r.jsx)(c,{})]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["Click ",(0,r.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["To complete the onboarding tasks, click ",(0,r.jsx)(n.strong,{children:"What is OpenRAG"}),", and then click ",(0,r.jsx)(n.strong,{children:"Add a Document"}),"."]}),"\n",(0,r.jsxs)(n.li,{children:["Continue with the ",(0,r.jsx)(n.a,{href:"/quickstart",children:"Quickstart"}),"."]}),"\n"]})]})]})]})}function p(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(h,{...e})}):h(e)}},3862:(e,n,s)=>{s.d(n,{Ay:()=>l,RM:()=>t});var r=s(4848),i=s(8453);const t=[];function o(e){const n={a:"a",admonition:"admonition",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",...(0,i.R)(),...e.components};return(0,r.jsxs)(r.Fragment,{children:[(0,r.jsxs)(n.ol,{children:["\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install",children:"Install WSL"})," with the Ubuntu distribution using WSL 2:"]}),"\n",(0,r.jsx)(n.pre,{children:(0,r.jsx)(n.code,{className:"language-powershell",children:"wsl --install -d Ubuntu\n"})}),"\n",(0,r.jsxs)(n.p,{children:["For new installations, the ",(0,r.jsx)(n.code,{children:"wsl --install"})," command uses WSL 2 and Ubuntu by default."]}),"\n",(0,r.jsxs)(n.p,{children:["For existing WSL installations, you can ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#change-the-default-linux-distribution-installed",children:"change the distribution"})," and 
",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#upgrade-version-from-wsl-1-to-wsl-2",children:"check the WSL version"}),"."]}),"\n",(0,r.jsx)(n.admonition,{title:"Known limitation",type:"warning",children:(0,r.jsx)(n.p,{children:"OpenRAG isn't compatible with nested virtualization, which can cause networking issues.\nDon't install OpenRAG on a WSL distribution that is installed inside a Windows VM.\nInstead, install OpenRAG on your base OS or a non-nested Linux VM."})}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install#ways-to-run-multiple-linux-distributions-with-wsl",children:"Start your WSL Ubuntu distribution"})," if it doesn't start automatically."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/setup/environment#set-up-your-linux-username-and-password",children:"Set up a username and password for your WSL distribution"}),"."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsxs)(n.p,{children:[(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/tutorials/wsl-containers",children:"Install Docker Desktop for Windows with WSL 2"}),". When you reach the Docker Desktop ",(0,r.jsx)(n.strong,{children:"WSL integration"})," settings, make sure your Ubuntu distribution is enabled, and then click ",(0,r.jsx)(n.strong,{children:"Apply & Restart"})," to enable Docker support in WSL."]}),"\n"]}),"\n",(0,r.jsxs)(n.li,{children:["\n",(0,r.jsx)(n.p,{children:"Install and run OpenRAG from within your WSL Ubuntu distribution."}),"\n"]}),"\n"]}),"\n",(0,r.jsx)("br",{}),"\n",(0,r.jsxs)(n.p,{children:["If you encounter issues with port forwarding or the Windows Firewall, you might need to adjust the ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/security/operating-system-security/network-security/windows-firewall/hyper-v-firewall",children:"Hyper-V firewall settings"})," to allow communication between your WSL distribution and the Windows host. 
For more troubleshooting advice for networking issues, see ",(0,r.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/troubleshooting#common-issues",children:"Troubleshooting WSL common issues"}),"."]})]})}function l(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,r.jsx)(n,{...e,children:(0,r.jsx)(o,{...e})}):o(e)}},4398:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>p,contentTitle:()=>h,default:()=>j,frontMatter:()=>d,metadata:()=>r,toc:()=>u});const r=JSON.parse('{"id":"get-started/install","title":"Install OpenRAG with TUI","description":"Install OpenRAG and then run the OpenRAG Terminal User Interface(TUI) to start your OpenRAG deployment with a guided setup process.","source":"@site/docs/get-started/install.mdx","sourceDirName":"get-started","slug":"/install","permalink":"/install","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/install.mdx","tags":[],"version":"current","frontMatter":{"title":"Install OpenRAG with TUI","slug":"/install"},"sidebar":"tutorialSidebar","previous":{"title":"Quickstart","permalink":"/quickstart"},"next":{"title":"Install OpenRAG containers","permalink":"/docker"}}');var i=s(4848),t=s(8453),o=s(1470),l=s(9365),a=s(3656),c=s(3862);const d={title:"Install OpenRAG with TUI",slug:"/install"},h=void 0,p={},u=[{value:"Prerequisites",id:"prerequisites",level:2},...c.RM,{value:"Install OpenRAG",id:"install",level:2},{value:"Set up OpenRAG with the TUI",id:"setup",level:2},...a.RM,{value:"Exit the OpenRAG TUI",id:"exit-the-openrag-tui",level:2},{value:"Manage OpenRAG containers with the TUI",id:"tui-container-management",level:2},{value:"Diagnostics",id:"diagnostics",level:3},{value:"Status",id:"status",level:3},{value:"Reset containers",id:"reset-containers",level:3},{value:"Start all services",id:"start-all-services",level:3},{value:"Upgrade OpenRAG",id:"upgrade",level:2},{value:"Reinstall OpenRAG",id:"reinstall",level:2}];function x(e){const n={a:"a",admonition:"admonition",code:"code",em:"em",h2:"h2",h3:"h3",img:"img",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,t.R)(),...e.components},{Details:r}=n;return r||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,i.jsxs)(i.Fragment,{children:[(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.a,{href:"#install",children:"Install OpenRAG"})," and then run the ",(0,i.jsx)(n.a,{href:"#setup",children:"OpenRAG Terminal User Interface(TUI)"})," to start your OpenRAG deployment with a guided setup process."]}),"\n",(0,i.jsx)(n.p,{children:"The OpenRAG Terminal User Interface (TUI) allows you to set up, configure, and monitor your OpenRAG deployment directly from the terminal."}),"\n",(0,i.jsx)(n.p,{children:(0,i.jsx)(n.img,{alt:"OpenRAG TUI Interface",src:s(5689).A+"",width:"1995",height:"1099"})}),"\n",(0,i.jsxs)(n.p,{children:["Instead of starting OpenRAG using Docker commands and manually editing values in the ",(0,i.jsx)(n.code,{children:".env"})," file, the TUI walks you through the setup. 
It prompts for variables where required, creates a ",(0,i.jsx)(n.code,{children:".env"})," file for you, and then starts OpenRAG."]}),"\n",(0,i.jsx)(n.p,{children:"Once OpenRAG is running, use the TUI to monitor your application, control your containers, and retrieve logs."}),"\n",(0,i.jsxs)(n.p,{children:["If you prefer running Podman or Docker containers and manually editing ",(0,i.jsx)(n.code,{children:".env"})," files, see ",(0,i.jsx)(n.a,{href:"/docker",children:"Install OpenRAG Containers"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["All OpenRAG installations require ",(0,i.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python"})," version 3.13 or later."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"If you aren't using the automatic installer script, install the following:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:(0,i.jsx)(n.code,{children:"podman-compose"})})," or ",(0,i.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),". To use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Microsoft Windows only: To run OpenRAG on Windows, you must use the Windows Subsystem for Linux (WSL)."}),"\n",(0,i.jsxs)(r,{children:[(0,i.jsx)("summary",{children:"Install WSL for OpenRAG"}),(0,i.jsx)(c.Ay,{})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Prepare model providers and credentials."}),"\n",(0,i.jsxs)(n.p,{children:["During ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),", you must select language model and embedding model providers.\nIf your chosen provider offers both types, you can use the same provider for both selections.\nIf your provider offers only one type, such as Anthropic, you must select two providers."]}),"\n",(0,i.jsx)(n.p,{children:"Gather the credentials and connection details for your chosen model providers before starting onboarding:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["OpenAI: Create an ",(0,i.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:["Anthropic language models: Create an ",(0,i.jsx)(n.a,{href:"https://www.anthropic.com/docs/api/reference",children:"Anthropic API key"}),"."]}),"\n",(0,i.jsx)(n.li,{children:"IBM watsonx.ai: Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx deployment."}),"\n",(0,i.jsxs)(n.li,{children:["Ollama: Use the ",(0,i.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama documentation"})," to set up your Ollama instance locally, in the cloud, or on a remote server, and then get your Ollama server's base URL."]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Optional: Install GPU support with an NVIDIA GPU, 
",(0,i.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine. If you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"install",children:"Install OpenRAG"}),"\n",(0,i.jsx)(n.p,{children:"Choose an installation method based on your needs:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:"For new users, the automatic installer script detects and installs prerequisites and then runs OpenRAG."}),"\n",(0,i.jsxs)(n.li,{children:["For a quick test, use ",(0,i.jsx)(n.code,{children:"uvx"})," to run OpenRAG without creating a project or modifying files."]}),"\n",(0,i.jsxs)(n.li,{children:["Use ",(0,i.jsx)(n.code,{children:"uv add"})," to install OpenRAG as a managed dependency in a new or existing Python project."]}),"\n",(0,i.jsxs)(n.li,{children:["Use ",(0,i.jsx)(n.code,{children:"uv pip install"})," to install OpenRAG into an existing virtual environment."]}),"\n"]}),"\n",(0,i.jsxs)(o.A,{groupId:"Installation method",children:[(0,i.jsxs)(l.A,{value:"installer",label:"Automatic installer",default:!0,children:[(0,i.jsxs)(n.p,{children:["The script detects and installs uv, Docker/Podman, and Docker Compose prerequisites, then runs OpenRAG with ",(0,i.jsx)(n.code,{children:"uvx"}),"."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Create a directory to store the OpenRAG configuration files:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"mkdir openrag-workspace\ncd openrag-workspace\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run the installer:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"curl -fsSL https://docs.openr.ag/files/run_openrag_with_prereqs.sh | bash\n"})}),"\n"]}),"\n"]}),(0,i.jsxs)(n.p,{children:["The TUI creates a ",(0,i.jsx)(n.code,{children:".env"})," file and docker-compose files in the current working directory."]})]}),(0,i.jsxs)(l.A,{value:"uvx",label:"Quick test with uvx",children:[(0,i.jsxs)(n.p,{children:["Use ",(0,i.jsx)(n.code,{children:"uvx"})," to quickly run OpenRAG without creating a project or modifying any files."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Create a directory to store the OpenRAG configuration files:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"mkdir openrag-workspace\ncd openrag-workspace\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To run a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx --from openrag==0.1.30 openrag\n"})}),"\n"]}),"\n"]}),(0,i.jsxs)(n.p,{children:["The TUI creates a ",(0,i.jsx)(n.code,{children:".env"})," file and docker-compose files in the current working directory."]})]}),(0,i.jsxs)(l.A,{value:"uv-add",label:"Python project with uv add",children:[(0,i.jsxs)(n.p,{children:["Use ",(0,i.jsx)(n.code,{children:"uv add"})," to install OpenRAG as a dependency in your Python project. 
This adds OpenRAG to your ",(0,i.jsx)(n.code,{children:"pyproject.toml"})," and lockfile, making your installation reproducible and version-controlled."]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Create a new project with a virtual environment:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv init YOUR_PROJECT_NAME\ncd YOUR_PROJECT_NAME\n"})}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.code,{children:"(venv)"})," prompt doesn't change, but ",(0,i.jsx)(n.code,{children:"uv"})," commands will automatically use the project's virtual environment."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Add OpenRAG to your project:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To add a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add openrag==0.1.30\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start the OpenRAG TUI:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]}),(0,i.jsxs)(r,{closed:!0,children:[(0,i.jsx)("summary",{children:"Install a local wheel"}),(0,i.jsx)(n.p,{children:"If you downloaded the OpenRAG wheel to your local machine, install it by specifying its path:"}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Add the wheel to your project:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add PATH/TO/openrag-VERSION-py3-none-any.whl\n"})}),"\n",(0,i.jsxs)(n.p,{children:["Replace ",(0,i.jsx)(n.code,{children:"PATH/TO/"})," and ",(0,i.jsx)(n.code,{children:"VERSION"})," with the path and version of your downloaded OpenRAG ",(0,i.jsx)(n.code,{children:".whl"})," file."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]})]}),(0,i.jsxs)(l.A,{value:"uv-pip",label:"Existing virtual environment with uv pip install",children:[(0,i.jsxs)(n.p,{children:["Use ",(0,i.jsx)(n.code,{children:"uv pip install"})," to install OpenRAG into an existing virtual environment that isn't managed by ",(0,i.jsx)(n.code,{children:"uv"}),"."]}),(0,i.jsx)(n.admonition,{type:"tip",children:(0,i.jsxs)(n.p,{children:["For new projects, ",(0,i.jsx)(n.code,{children:"uv add"})," is recommended as it manages dependencies in your project's lockfile."]})}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Activate your virtual environment."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Install OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv pip install openrag\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Run OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]})]}),"\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#setup",children:"Set up OpenRAG with the TUI"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["If you encounter errors during installation, see ",(0,i.jsx)(n.a,{href:"/support/troubleshoot",children:"Troubleshoot 
OpenRAG"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"setup",children:"Set up OpenRAG with the TUI"}),"\n",(0,i.jsxs)(n.p,{children:["The OpenRAG setup process creates a ",(0,i.jsx)(n.code,{children:".env"})," file at the root of your OpenRAG directory, and then starts OpenRAG.\nIf it detects a ",(0,i.jsx)(n.code,{children:".env"})," file in the OpenRAG root directory, it sources any variables from the ",(0,i.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,i.jsxs)(n.p,{children:["The TUI offers two setup methods to populate the required values. ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," can generate all minimum required values for OpenRAG. However, ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," doesn't enable ",(0,i.jsx)(n.a,{href:"/knowledge#auth",children:"OAuth connectors for cloud storage"}),". If you want to use OAuth connectors to upload documents from cloud storage, select ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),".\nIf OpenRAG detects OAuth credentials, it recommends ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),"."]}),"\n",(0,i.jsxs)(o.A,{groupId:"Setup method",children:[(0,i.jsx)(l.A,{value:"Basic setup",label:"Basic setup",default:!0,children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Basic Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Basic Setup"})," or press ",(0,i.jsx)("kbd",{children:"1"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n",(0,i.jsxs)(n.p,{children:["The OpenSearch password is required. The Langflow admin password is optional.\nIf no Langflow admin password is generated, Langflow runs in ",(0,i.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login",children:"autologin mode"})," with no password required."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Optional: Paste your OpenAI API key in the OpenAI API key field. 
You can also provide this during onboarding or choose a different model provider."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),".\nYour passwords are saved in the ",(0,i.jsx)(n.code,{children:".env"})," file used to start OpenRAG."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start the Docling service, under ",(0,i.jsx)(n.strong,{children:"Native Services"}),", click ",(0,i.jsx)(n.strong,{children:"Start"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To open the OpenRAG application, navigate to the TUI main menu, and then click ",(0,i.jsx)(n.strong,{children:"Open App"}),".\nAlternatively, in your browser, navigate to ",(0,i.jsx)(n.code,{children:"localhost:3000"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]})}),(0,i.jsx)(l.A,{value:"Advanced setup",label:"Advanced setup",children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To install OpenRAG with ",(0,i.jsx)(n.strong,{children:"Advanced Setup"}),", click ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," or press ",(0,i.jsx)("kbd",{children:"2"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Generate Passwords"})," to generate passwords for OpenSearch and Langflow."]}),"\n",(0,i.jsxs)(n.p,{children:["The OpenSearch password is required. The Langflow admin password is optional.\nIf no Langflow admin password is generated, Langflow runs in ",(0,i.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-auto-login",children:"autologin mode"})," with no password required."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Paste your OpenAI API key in the OpenAI API key field."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["If you want to upload documents from external storage, such as Google Drive, add the required OAuth credentials for the connectors that you want to use. These settings can be populated automatically if OpenRAG detects these credentials in a ",(0,i.jsx)(n.code,{children:".env"})," file in the OpenRAG installation directory."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. For more information, see the AWS documentation on ",(0,i.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. 
You can generate these in the ",(0,i.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". For more information, see the ",(0,i.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,i.jsxs)(n.li,{children:[(0,i.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". For more information, see the ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:["You can ",(0,i.jsx)(n.a,{href:"/ingestion#oauth-ingestion",children:"manage OAuth credentials"})," later, but it is recommended to configure them during initial set up."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"The OpenRAG TUI presents redirect URIs for your OAuth app.\nThese are the URLs your OAuth provider will redirect back to after user sign-in.\nRegister these redirect values with your OAuth provider as they are presented in the TUI."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Click ",(0,i.jsx)(n.strong,{children:"Save Configuration"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start OpenRAG, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),".\nStartup pulls container images and runs them, so it can take some time.\nWhen startup is complete, the TUI displays the following:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"Services started successfully\nCommand completed successfully\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To start the Docling service, under ",(0,i.jsx)(n.strong,{children:"Native Services"}),", click ",(0,i.jsx)(n.strong,{children:"Start"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["To open the OpenRAG application, navigate to the TUI main menu, and then click ",(0,i.jsx)(n.strong,{children:"Open App"}),".\nAlternatively, in your browser, navigate to ",(0,i.jsx)(n.code,{children:"localhost:3000"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"If you enabled OAuth connectors, you must sign in to your OAuth provider before being redirected to your OpenRAG instance."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Two additional variables are available for ",(0,i.jsx)(n.strong,{children:"Advanced Setup"})," at this point.\nOnly change these variables if you have a non-default network configuration for your deployment, such as using a reverse proxy or custom domain."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.code,{children:"LANGFLOW_PUBLIC_URL"}),": Sets the base address to access the Langflow web interface. 
This is where users interact with flows in a browser."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.code,{children:"WEBHOOK_BASE_URL"}),": Sets the base address of the OpenRAG OAuth connector endpoint.\nSupported webhook endpoints:"]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:"Amazon S3: Not applicable."}),"\n",(0,i.jsxs)(n.li,{children:["Google Drive: ",(0,i.jsx)(n.code,{children:"/connectors/google_drive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["OneDrive: ",(0,i.jsx)(n.code,{children:"/connectors/onedrive/webhook"})]}),"\n",(0,i.jsxs)(n.li,{children:["SharePoint: ",(0,i.jsx)(n.code,{children:"/connectors/sharepoint/webhook"})]}),"\n"]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]})})]}),"\n",(0,i.jsx)(a.Ay,{}),"\n",(0,i.jsx)(n.h2,{id:"exit-the-openrag-tui",children:"Exit the OpenRAG TUI"}),"\n",(0,i.jsxs)(n.p,{children:["To exit the OpenRAG TUI, navigate to the main menu, and then press ",(0,i.jsx)("kbd",{children:"q"}),".\nThe OpenRAG containers continue to run until they are stopped.\nFor more information, see ",(0,i.jsx)(n.a,{href:"#tui-container-management",children:"Manage OpenRAG containers with the TUI "}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["To relaunch the TUI, run ",(0,i.jsx)(n.code,{children:"uv run openrag"}),".\nIf you installed OpenRAG with ",(0,i.jsx)(n.code,{children:"uvx"}),", run ",(0,i.jsx)(n.code,{children:"uvx openrag"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"tui-container-management",children:"Manage OpenRAG containers with the TUI"}),"\n",(0,i.jsx)(n.p,{children:"After installation, the TUI can deploy, manage, and upgrade your OpenRAG containers."}),"\n",(0,i.jsx)(n.h3,{id:"diagnostics",children:"Diagnostics"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Diagnostics"})," menu provides health monitoring for your container runtimes and monitoring of your OpenSearch security."]}),"\n",(0,i.jsx)(n.h3,{id:"status",children:"Status"}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Status"})," menu displays information on your container deployment.\nHere you can check container health, find your service ports, view logs, and upgrade your containers."]}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Logs"}),": To view streaming logs, select the container you want to view, and press ",(0,i.jsx)("kbd",{children:"l"}),".\nTo copy the logs, click ",(0,i.jsx)(n.strong,{children:"Copy to Clipboard"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Upgrade"}),": Check for updates. 
For more information, see ",(0,i.jsx)(n.a,{href:"#upgrade",children:"upgrade OpenRAG"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Reset"}),": This is a destructive action that ",(0,i.jsx)(n.a,{href:"#reset-containers",children:"resets your containers"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.strong,{children:"Native services"}),": From the ",(0,i.jsx)(n.strong,{children:"Status"})," menu, you can view the status, port, and process ID (PID) of the OpenRAG native services.\nYou can also click ",(0,i.jsx)(n.strong,{children:"Stop"})," or ",(0,i.jsx)(n.strong,{children:"Restart"})," to stop and start OpenRAG native services."]}),"\n",(0,i.jsxs)(n.p,{children:["A ",(0,i.jsx)(n.em,{children:"native service"})," in OpenRAG is a service that runs locally on your machine, not within a container. For example, the ",(0,i.jsx)(n.code,{children:"docling serve"})," process is an OpenRAG native service because this document processing service runs on your local machine, separate from the OpenRAG containers."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h3,{id:"reset-containers",children:"Reset containers"}),"\n",(0,i.jsx)(n.admonition,{type:"warning",children:(0,i.jsx)(n.p,{children:"This is a destructive action that destroys and recreates all of your OpenRAG containers."})}),"\n",(0,i.jsxs)(n.p,{children:["To destroy and recreate your OpenRAG containers, go to the TUI ",(0,i.jsxs)(n.a,{href:"#status",children:[(0,i.jsx)(n.strong,{children:"Status"})," menu"]}),", and then click ",(0,i.jsx)(n.strong,{children:"Reset"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["The ",(0,i.jsx)(n.strong,{children:"Reset"})," function runs two commands. First, it stops and removes all containers, volumes, and local images:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"docker compose down --volumes --remove-orphans --rmi local\n"})}),"\n",(0,i.jsxs)(n.p,{children:["Then, it removes any additional Docker objects with ",(0,i.jsx)(n.code,{children:"docker system prune -f"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["If you reset your containers as part of reinstalling OpenRAG, continue the ",(0,i.jsx)(n.a,{href:"#reinstall",children:"reinstallation process"})," after resetting the containers."]}),"\n",(0,i.jsx)(n.h3,{id:"start-all-services",children:"Start all services"}),"\n",(0,i.jsxs)(n.p,{children:["On the TUI main page, click ",(0,i.jsx)(n.strong,{children:"Start All Services"})," to start the OpenRAG containers and launch OpenRAG itself."]}),"\n",(0,i.jsx)(n.p,{children:"When you start all services, the following processes happen:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["OpenRAG automatically detects your container runtime, and then checks if your machine has compatible GPU support by checking for ",(0,i.jsx)(n.code,{children:"CUDA"}),", ",(0,i.jsx)(n.code,{children:"NVIDIA_SMI"}),", and Docker/Podman runtime support. 
This check determines which Docker Compose file OpenRAG uses."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["OpenRAG pulls the OpenRAG container images with ",(0,i.jsx)(n.code,{children:"docker compose pull"})," if any images are missing."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["OpenRAG deploys the containers with ",(0,i.jsx)(n.code,{children:"docker compose up -d"}),"."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"upgrade",children:"Upgrade OpenRAG"}),"\n",(0,i.jsx)(n.p,{children:"To upgrade OpenRAG, upgrade the OpenRAG Python package, and then upgrade the OpenRAG containers."}),"\n",(0,i.jsxs)(n.p,{children:["This is a two part process because upgrading the OpenRAG Python package updates the TUI and Python code, but the container versions are controlled by environment variables in your ",(0,i.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Stop your OpenRAG containers: In the OpenRAG TUI, go to the ",(0,i.jsx)(n.strong,{children:"Status"})," menu, and then click ",(0,i.jsx)(n.strong,{children:"Stop Services"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Upgrade the OpenRAG Python package to the latest version from ",(0,i.jsx)(n.a,{href:"https://pypi.org/project/openrag/",children:"PyPI"}),"."]}),"\n",(0,i.jsxs)(o.A,{groupId:"Installation method",children:[(0,i.jsxs)(l.A,{value:"installer",label:"Automatic installer or uvx",default:!0,children:[(0,i.jsxs)(n.p,{children:["Use these steps to upgrade the Python package if you installed OpenRAG using the automatic installer or ",(0,i.jsx)(n.code,{children:"uvx"}),":"]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Navigate to your OpenRAG workspace directory:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"cd openrag-workspace\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Upgrade the OpenRAG package:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx --from openrag openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To upgrade to a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx --from openrag==0.1.33 openrag\n"})}),"\n"]}),"\n"]})]}),(0,i.jsxs)(l.A,{value:"uv-add",label:"Python project (uv add)",children:[(0,i.jsxs)(n.p,{children:["Use these steps to upgrade the Python package if you installed OpenRAG in a Python project with ",(0,i.jsx)(n.code,{children:"uv add"}),":"]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Navigate to your project directory:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"cd YOUR_PROJECT_NAME\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Update OpenRAG to the latest version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add --upgrade openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To upgrade to a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv add --upgrade openrag==0.1.33\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start the OpenRAG TUI:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run 
openrag\n"})}),"\n"]}),"\n"]})]}),(0,i.jsxs)(l.A,{value:"uv-pip",label:"Virtual environment (uv pip install)",children:[(0,i.jsxs)(n.p,{children:["Use these steps to upgrade the Python package if you installed OpenRAG in a venv with ",(0,i.jsx)(n.code,{children:"uv pip install"}),":"]}),(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Activate your virtual environment."}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Upgrade OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv pip install --upgrade openrag\n"})}),"\n",(0,i.jsx)(n.p,{children:"To upgrade to a specific version:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv pip install --upgrade openrag==0.1.33\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start the OpenRAG TUI:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv run openrag\n"})}),"\n"]}),"\n"]})]})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Start the upgraded OpenRAG containers: In the OpenRAG TUI, click ",(0,i.jsx)(n.strong,{children:"Start All Services"}),", and then wait while the containers start."]}),"\n",(0,i.jsxs)(n.p,{children:["After upgrading the Python package, OpenRAG runs ",(0,i.jsx)(n.code,{children:"docker compose pull"})," to get the appropriate container images matching the version specified in your OpenRAG ",(0,i.jsx)(n.code,{children:".env"})," file. Then, it recreates the containers with the new images using ",(0,i.jsx)(n.code,{children:"docker compose up -d --force-recreate"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["In the ",(0,i.jsx)(n.code,{children:".env"})," file, the ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION"})," ",(0,i.jsx)(n.a,{href:"/reference/configuration#system-settings",children:"environment variable"})," is set to ",(0,i.jsx)(n.code,{children:"latest"})," by default, which it pulls the ",(0,i.jsx)(n.code,{children:"latest"})," available container images.\nTo pin a specific container image version, you can set ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION"})," to the desired container image version, such as ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION=0.1.33"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["However, when you upgrade the Python package, OpenRAG automatically attempts to keep the ",(0,i.jsx)(n.code,{children:"OPENRAG_VERSION"})," synchronized with the Python package version.\nYou might need to edit the ",(0,i.jsx)(n.code,{children:".env"})," file after upgrading the Python package to enforce a different container version.\nThe TUI warns you if it detects a version mismatch."]}),"\n",(0,i.jsxs)(n.p,{children:["If you get an error that ",(0,i.jsx)(n.code,{children:"langflow container already exists"})," error during upgrade, see ",(0,i.jsx)(n.a,{href:"/support/troubleshoot#langflow-container-already-exists-during-upgrade",children:"Langflow container already exists during upgrade"}),"."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["When the upgrade process is complete, you can close the ",(0,i.jsx)(n.strong,{children:"Status"})," window and continue using OpenRAG."]}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"reinstall",children:"Reinstall OpenRAG"}),"\n",(0,i.jsx)(n.p,{children:"To reinstall OpenRAG with a completely fresh setup:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["In the TUI 
",(0,i.jsx)(n.strong,{children:"Status"})," menu, ",(0,i.jsx)(n.a,{href:"#reset-containers",children:"reset your containers"})," to destroy the existing OpenRAG containers and their data."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Optional: Delete your project's ",(0,i.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,i.jsxs)(n.p,{children:["The Reset operation doesn't remove your project's ",(0,i.jsx)(n.code,{children:".env"})," file, so your passwords, API keys, and OAuth settings can be preserved.\nIf you delete the ",(0,i.jsx)(n.code,{children:".env"})," file, you must run the ",(0,i.jsx)(n.a,{href:"#setup",children:"Set up OpenRAG with the TUI"})," process again to create a new configuration file."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["Optional: Delete your OpenSearch knowledge base by deleting the contents of the ",(0,i.jsx)(n.code,{children:"./opensearch-data"})," folder in your OpenRAG installation directory."]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsxs)(n.p,{children:["In the TUI ",(0,i.jsx)(n.strong,{children:"Setup"})," menu, repeat the ",(0,i.jsx)(n.a,{href:"#setup",children:"Basic Setup"})," process:"]}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["Click ",(0,i.jsx)(n.strong,{children:"Start All Services"})," to pull container images and start them."]}),"\n",(0,i.jsxs)(n.li,{children:["Under ",(0,i.jsx)(n.strong,{children:"Native Services"}),", click ",(0,i.jsx)(n.strong,{children:"Start"})," to start the Docling service."]}),"\n",(0,i.jsxs)(n.li,{children:["Click ",(0,i.jsx)(n.strong,{children:"Open App"})," to open the OpenRAG application."]}),"\n",(0,i.jsxs)(n.li,{children:["Continue with ",(0,i.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding"}),"."]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:["If reinstalling OpenRAG and deleting the ",(0,i.jsx)(n.code,{children:".env"})," file doesn't reset setup or onboarding, see ",(0,i.jsx)(n.a,{href:"/support/troubleshoot#reinstalling-openrag-doesnt-reset-onboarding",children:"Reinstalling OpenRAG doesn't reset onboarding"}),"."]})]})}function j(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(x,{...e})}):x(e)}},5689:(e,n,s)=>{s.d(n,{A:()=>r});const r=s.p+"assets/images/OpenRAG_TUI_2025-09-10T13_04_11_757637-9441c53ba39162a88ac6c11cbeaed0e0.svg"}}]); \ No newline at end of file diff --git a/assets/js/eb5b356a.285e3b8b.js b/assets/js/eb5b356a.285e3b8b.js new file mode 100644 index 00000000..66f55301 --- /dev/null +++ b/assets/js/eb5b356a.285e3b8b.js @@ -0,0 +1 @@ +"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[2668],{5014:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>d,contentTitle:()=>c,default:()=>p,frontMatter:()=>l,metadata:()=>o,toc:()=>h});const o=JSON.parse('{"id":"support/troubleshoot","title":"Troubleshoot OpenRAG","description":"This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG.","source":"@site/docs/support/troubleshoot.mdx","sourceDirName":"support","slug":"/support/troubleshoot","permalink":"/support/troubleshoot","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/support/troubleshoot.mdx","tags":[],"version":"current","frontMatter":{"title":"Troubleshoot OpenRAG","slug":"/support/troubleshoot"},"sidebar":"tutorialSidebar","previous":{"title":"Environment 
variables","permalink":"/reference/configuration"}}');var i=s(4848),r=s(8453),a=s(1470),t=s(9365);const l={title:"Troubleshoot OpenRAG",slug:"/support/troubleshoot"},c=void 0,d={},h=[{value:"OpenSearch fails to start",id:"opensearch-fails-to-start",level:2},{value:"OpenRAG fails to start from the TUI with operation not supported",id:"openrag-fails-to-start-from-the-tui-with-operation-not-supported",level:2},{value:"OpenRAG installation fails with unable to get local issuer certificate",id:"openrag-installation-fails-with-unable-to-get-local-issuer-certificate",level:2},{value:"Langflow connection issues",id:"langflow-connection-issues",level:2},{value:"Container out of memory errors",id:"container-out-of-memory-errors",level:2},{value:"Memory issue with Podman on macOS",id:"memory-issue-with-podman-on-macos",level:2},{value:"Port conflicts",id:"port-conflicts",level:2},{value:"OCR ingestion fails (easyocr not installed)",id:"ocr-ingestion-fails-easyocr-not-installed",level:2},{value:"Upgrade fails due to Langflow container already exists",id:"langflow-container-already-exists-during-upgrade",level:2},{value:"Document ingestion or similarity search issues",id:"document-ingestion-or-similarity-search-issues",level:2}];function u(e){const n={a:"a",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",pre:"pre",ul:"ul",...(0,r.R)(),...e.components};return(0,i.jsxs)(i.Fragment,{children:[(0,i.jsx)(n.p,{children:"This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG."}),"\n",(0,i.jsx)(n.h2,{id:"opensearch-fails-to-start",children:"OpenSearch fails to start"}),"\n",(0,i.jsxs)(n.p,{children:["Check that ",(0,i.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," set in ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"})," meets requirements.\nThe password must contain at least 8 characters, and must contain at least one uppercase letter, one lowercase letter, one digit, and one special character that is strong."]}),"\n",(0,i.jsx)(n.h2,{id:"openrag-fails-to-start-from-the-tui-with-operation-not-supported",children:"OpenRAG fails to start from the TUI with operation not supported"}),"\n",(0,i.jsxs)(n.p,{children:["This error occurs when starting OpenRAG with the TUI in ",(0,i.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install",children:"WSL (Windows Subsystem for Linux)"}),"."]}),"\n",(0,i.jsxs)(n.p,{children:["The error occurs because OpenRAG is running within a WSL environment, so ",(0,i.jsx)(n.code,{children:"webbrowser.open()"})," can't launch a browser automatically."]}),"\n",(0,i.jsxs)(n.p,{children:["To access the OpenRAG application, open a web browser and enter ",(0,i.jsx)(n.code,{children:"http://localhost:3000"})," in the address bar."]}),"\n",(0,i.jsx)(n.h2,{id:"openrag-installation-fails-with-unable-to-get-local-issuer-certificate",children:"OpenRAG installation fails with unable to get local issuer certificate"}),"\n",(0,i.jsxs)(n.p,{children:["If you are installing OpenRAG on macOS, and the installation fails with ",(0,i.jsx)(n.code,{children:"unable to get local issuer certificate"}),", run the following command, and then retry the installation:"]}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:'open "/Applications/Python VERSION/Install Certificates.command"\n'})}),"\n",(0,i.jsxs)(n.p,{children:["Replace ",(0,i.jsx)(n.code,{children:"VERSION"})," with your installed Python version, such as 
",(0,i.jsx)(n.code,{children:"3.13"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"langflow-connection-issues",children:"Langflow connection issues"}),"\n",(0,i.jsxs)(n.p,{children:["Verify the ",(0,i.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})," credentials set in ",(0,i.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"})," are correct."]}),"\n",(0,i.jsx)(n.h2,{id:"container-out-of-memory-errors",children:"Container out of memory errors"}),"\n",(0,i.jsxs)(n.p,{children:["Increase Docker memory allocation or use ",(0,i.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml",children:"docker-compose-cpu.yml"})," to deploy OpenRAG."]}),"\n",(0,i.jsx)(n.h2,{id:"memory-issue-with-podman-on-macos",children:"Memory issue with Podman on macOS"}),"\n",(0,i.jsx)(n.p,{children:"If you're using Podman on macOS, you might need to increase VM memory on your Podman machine.\nThis example increases the machine size to 8 GB of RAM, which should be sufficient to run OpenRAG."}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"podman machine stop\npodman machine rm\npodman machine init --memory 8192 # 8 GB example\npodman machine start\n"})}),"\n",(0,i.jsx)(n.h2,{id:"port-conflicts",children:"Port conflicts"}),"\n",(0,i.jsx)(n.p,{children:"Ensure ports 3000, 7860, 8000, 9200, 5601 are available."}),"\n",(0,i.jsx)(n.h2,{id:"ocr-ingestion-fails-easyocr-not-installed",children:"OCR ingestion fails (easyocr not installed)"}),"\n",(0,i.jsxs)(n.p,{children:["If Docling ingestion fails with an OCR-related error and mentions ",(0,i.jsx)(n.code,{children:"easyocr"})," is missing, this is likely due to a stale ",(0,i.jsx)(n.code,{children:"uv"})," cache."]}),"\n",(0,i.jsxs)(n.p,{children:[(0,i.jsx)(n.code,{children:"easyocr"})," is already included as a dependency in OpenRAG's ",(0,i.jsx)(n.code,{children:"pyproject.toml"}),". Project-managed installations using ",(0,i.jsx)(n.code,{children:"uv sync"})," and ",(0,i.jsx)(n.code,{children:"uv run"})," always sync dependencies directly from your ",(0,i.jsx)(n.code,{children:"pyproject.toml"}),", so they should have ",(0,i.jsx)(n.code,{children:"easyocr"})," installed."]}),"\n",(0,i.jsxs)(n.p,{children:["If you're running OpenRAG with ",(0,i.jsx)(n.code,{children:"uvx openrag"}),", ",(0,i.jsx)(n.code,{children:"uvx"})," creates a cached, ephemeral environment that doesn't modify your project. 
This cache can become stale."]}),"\n",(0,i.jsxs)(n.p,{children:["On macOS, this cache directory is typically a user cache directory such as ",(0,i.jsx)(n.code,{children:"/Users/USER_NAME/.cache/uv"}),"."]}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"To clear the uv cache, run:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uv cache clean\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Start OpenRAG:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"uvx openrag\n"})}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.p,{children:["If you don't need OCR, you can disable OCR-based processing in your ingestion settings to avoid requiring ",(0,i.jsx)(n.code,{children:"easyocr"}),"."]}),"\n",(0,i.jsx)(n.h2,{id:"langflow-container-already-exists-during-upgrade",children:"Upgrade fails due to Langflow container already exists"}),"\n",(0,i.jsxs)(n.p,{children:["If you encounter a ",(0,i.jsx)(n.code,{children:"langflow container already exists"})," error when upgrading OpenRAG, this typically means you upgraded OpenRAG with ",(0,i.jsx)(n.code,{children:"uv"}),", but you didn't remove or upgrade containers from a previous installation."]}),"\n",(0,i.jsx)(n.p,{children:"To resolve this issue, do the following:"}),"\n",(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Remove only the Langflow container:"}),"\n",(0,i.jsxs)(a.A,{groupId:"Container software",children:[(0,i.jsx)(t.A,{value:"Podman",label:"Podman",children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Stop the Langflow container:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"podman stop langflow\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Remove the Langflow container:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"podman rm langflow --force\n"})}),"\n"]}),"\n"]})}),(0,i.jsx)(t.A,{value:"Docker",label:"Docker",default:!0,children:(0,i.jsxs)(n.ol,{children:["\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Stop the Langflow container:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"docker stop langflow\n"})}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Remove the Langflow container:"}),"\n",(0,i.jsx)(n.pre,{children:(0,i.jsx)(n.code,{className:"language-bash",children:"docker rm langflow --force\n"})}),"\n"]}),"\n"]})})]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Retry the upgrade:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/docker#upgrade-containers",children:"Upgrade self-managed containers"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/install#upgrade-containers",children:"Upgrade TUI-managed containers"})}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"If reinstalling the Langflow container doesn't resolve the issue, you must reset your OpenRAG deployment:"}),"\n",(0,i.jsxs)(n.ul,{children:["\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/docker#reset-containers",children:"Reset self-managed containers"})}),"\n",(0,i.jsx)(n.li,{children:(0,i.jsx)(n.a,{href:"/install#reset-containers",children:"Reset TUI-managed containers"})}),"\n"]}),"\n"]}),"\n",(0,i.jsxs)(n.li,{children:["\n",(0,i.jsx)(n.p,{children:"Retry 
the upgrade."}),"\n"]}),"\n"]}),"\n",(0,i.jsx)(n.h2,{id:"document-ingestion-or-similarity-search-issues",children:"Document ingestion or similarity search issues"}),"\n",(0,i.jsxs)(n.p,{children:["See ",(0,i.jsx)(n.a,{href:"/ingestion#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]})]})}function p(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,i.jsx)(n,{...e,children:(0,i.jsx)(u,{...e})}):u(e)}}}]); \ No newline at end of file diff --git a/assets/js/eb5b356a.6122add2.js b/assets/js/eb5b356a.6122add2.js deleted file mode 100644 index 8ebeea39..00000000 --- a/assets/js/eb5b356a.6122add2.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[2668],{5014:(e,n,s)=>{s.r(n),s.d(n,{assets:()=>d,contentTitle:()=>c,default:()=>u,frontMatter:()=>t,metadata:()=>r,toc:()=>h});const r=JSON.parse('{"id":"support/troubleshoot","title":"Troubleshoot OpenRAG","description":"This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG.","source":"@site/docs/support/troubleshoot.mdx","sourceDirName":"support","slug":"/support/troubleshoot","permalink":"/support/troubleshoot","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/support/troubleshoot.mdx","tags":[],"version":"current","frontMatter":{"title":"Troubleshoot OpenRAG","slug":"/support/troubleshoot"},"sidebar":"tutorialSidebar","previous":{"title":"Environment variables","permalink":"/reference/configuration"}}');var o=s(4848),i=s(8453),a=s(1470),l=s(9365);const t={title:"Troubleshoot OpenRAG",slug:"/support/troubleshoot"},c=void 0,d={},h=[{value:"OpenSearch fails to start",id:"opensearch-fails-to-start",level:2},{value:"OpenRAG fails to start from the TUI with operation not supported",id:"openrag-fails-to-start-from-the-tui-with-operation-not-supported",level:2},{value:"OpenRAG installation fails with unable to get local issuer certificate",id:"openrag-installation-fails-with-unable-to-get-local-issuer-certificate",level:2},{value:"Langflow connection issues",id:"langflow-connection-issues",level:2},{value:"Container out of memory errors",id:"container-out-of-memory-errors",level:2},{value:"Memory issue with Podman on macOS",id:"memory-issue-with-podman-on-macos",level:2},{value:"Port conflicts",id:"port-conflicts",level:2},{value:"OCR ingestion fails (easyocr not installed)",id:"ocr-ingestion-fails-easyocr-not-installed",level:2},{value:"Upgrade fails due to Langflow container already exists",id:"langflow-container-already-exists-during-upgrade",level:2},{value:"Reinstalling OpenRAG doesn't reset onboarding",id:"reinstalling-openrag-doesnt-reset-onboarding",level:2},{value:"Document ingestion or similarity search issues",id:"document-ingestion-or-similarity-search-issues",level:2}];function p(e){const n={a:"a",admonition:"admonition",code:"code",em:"em",h2:"h2",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",...(0,i.R)(),...e.components};return(0,o.jsxs)(o.Fragment,{children:[(0,o.jsx)(n.p,{children:"This page provides troubleshooting advice for issues you might encounter when using OpenRAG or contributing to OpenRAG."}),"\n",(0,o.jsx)(n.h2,{id:"opensearch-fails-to-start",children:"OpenSearch fails to start"}),"\n",(0,o.jsxs)(n.p,{children:["Check that ",(0,o.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," set in ",(0,o.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"})," meets requirements.\nThe password must 
contain at least 8 characters, and must contain at least one uppercase letter, one lowercase letter, one digit, and one special character that is strong."]}),"\n",(0,o.jsx)(n.h2,{id:"openrag-fails-to-start-from-the-tui-with-operation-not-supported",children:"OpenRAG fails to start from the TUI with operation not supported"}),"\n",(0,o.jsxs)(n.p,{children:["This error occurs when starting OpenRAG with the TUI in ",(0,o.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/windows/wsl/install",children:"WSL (Windows Subsystem for Linux)"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["The error occurs because OpenRAG is running within a WSL environment, so ",(0,o.jsx)(n.code,{children:"webbrowser.open()"})," can't launch a browser automatically."]}),"\n",(0,o.jsxs)(n.p,{children:["To access the OpenRAG application, open a web browser and enter ",(0,o.jsx)(n.code,{children:"http://localhost:3000"})," in the address bar."]}),"\n",(0,o.jsx)(n.h2,{id:"openrag-installation-fails-with-unable-to-get-local-issuer-certificate",children:"OpenRAG installation fails with unable to get local issuer certificate"}),"\n",(0,o.jsxs)(n.p,{children:["If you are installing OpenRAG on macOS, and the installation fails with ",(0,o.jsx)(n.code,{children:"unable to get local issuer certificate"}),", run the following command, and then retry the installation:"]}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:'open "/Applications/Python VERSION/Install Certificates.command"\n'})}),"\n",(0,o.jsxs)(n.p,{children:["Replace ",(0,o.jsx)(n.code,{children:"VERSION"})," with your installed Python version, such as ",(0,o.jsx)(n.code,{children:"3.13"}),"."]}),"\n",(0,o.jsx)(n.h2,{id:"langflow-connection-issues",children:"Langflow connection issues"}),"\n",(0,o.jsxs)(n.p,{children:["Verify the ",(0,o.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})," credentials set in ",(0,o.jsx)(n.a,{href:"/reference/configuration",children:"Environment variables"})," are correct."]}),"\n",(0,o.jsx)(n.h2,{id:"container-out-of-memory-errors",children:"Container out of memory errors"}),"\n",(0,o.jsxs)(n.p,{children:["Increase Docker memory allocation or use ",(0,o.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/docker-compose-cpu.yml",children:"docker-compose-cpu.yml"})," to deploy OpenRAG."]}),"\n",(0,o.jsx)(n.h2,{id:"memory-issue-with-podman-on-macos",children:"Memory issue with Podman on macOS"}),"\n",(0,o.jsx)(n.p,{children:"If you're using Podman on macOS, you might need to increase VM memory on your Podman machine.\nThis example increases the machine size to 8 GB of RAM, which should be sufficient to run OpenRAG."}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman machine stop\npodman machine rm\npodman machine init --memory 8192 # 8 GB example\npodman machine start\n"})}),"\n",(0,o.jsx)(n.h2,{id:"port-conflicts",children:"Port conflicts"}),"\n",(0,o.jsx)(n.p,{children:"Ensure ports 3000, 7860, 8000, 9200, 5601 are available."}),"\n",(0,o.jsx)(n.h2,{id:"ocr-ingestion-fails-easyocr-not-installed",children:"OCR ingestion fails (easyocr not installed)"}),"\n",(0,o.jsxs)(n.p,{children:["If Docling ingestion fails with an OCR-related error and mentions ",(0,o.jsx)(n.code,{children:"easyocr"})," is missing, this is likely due to a stale ",(0,o.jsx)(n.code,{children:"uv"})," cache."]}),"\n",(0,o.jsxs)(n.p,{children:[(0,o.jsx)(n.code,{children:"easyocr"})," is already included as a dependency in OpenRAG's ",(0,o.jsx)(n.code,{children:"pyproject.toml"}),". 
Project-managed installations using ",(0,o.jsx)(n.code,{children:"uv sync"})," and ",(0,o.jsx)(n.code,{children:"uv run"})," always sync dependencies directly from your ",(0,o.jsx)(n.code,{children:"pyproject.toml"}),", so they should have ",(0,o.jsx)(n.code,{children:"easyocr"})," installed."]}),"\n",(0,o.jsxs)(n.p,{children:["If you're running OpenRAG with ",(0,o.jsx)(n.code,{children:"uvx openrag"}),", ",(0,o.jsx)(n.code,{children:"uvx"})," creates a cached, ephemeral environment that doesn't modify your project. This cache can become stale."]}),"\n",(0,o.jsxs)(n.p,{children:["On macOS, this cache directory is typically a user cache directory such as ",(0,o.jsx)(n.code,{children:"/Users/USER_NAME/.cache/uv"}),"."]}),"\n",(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"To clear the uv cache, run:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"uv cache clean\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Start OpenRAG:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"uvx openrag\n"})}),"\n"]}),"\n"]}),"\n",(0,o.jsxs)(n.p,{children:["If you don't need OCR, you can disable OCR-based processing in your ingestion settings to avoid requiring ",(0,o.jsx)(n.code,{children:"easyocr"}),"."]}),"\n",(0,o.jsx)(n.h2,{id:"langflow-container-already-exists-during-upgrade",children:"Upgrade fails due to Langflow container already exists"}),"\n",(0,o.jsxs)(n.p,{children:["If you encounter a ",(0,o.jsx)(n.code,{children:"langflow container already exists"})," error when upgrading OpenRAG, this typically means you upgraded OpenRAG with ",(0,o.jsx)(n.code,{children:"uv"}),", but you didn't remove or upgrade containers from a previous installation."]}),"\n",(0,o.jsx)(n.p,{children:"To resolve this issue, do the following:"}),"\n",(0,o.jsxs)(n.p,{children:["First, try removing only the Langflow container, and then retry the upgrade in the OpenRAG TUI by clicking ",(0,o.jsx)(n.strong,{children:"Status"})," and then ",(0,o.jsx)(n.strong,{children:"Upgrade"}),"."]}),"\n",(0,o.jsxs)(a.A,{groupId:"Container software",children:[(0,o.jsx)(l.A,{value:"Podman",label:"Podman",children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Stop the Langflow container:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman stop langflow\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove the Langflow container:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman rm langflow --force\n"})}),"\n"]}),"\n"]})}),(0,o.jsx)(l.A,{value:"Docker",label:"Docker",default:!0,children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Stop the Langflow container:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker stop langflow\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove the Langflow container:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker rm langflow --force\n"})}),"\n"]}),"\n"]})})]}),"\n",(0,o.jsx)(n.p,{children:"If reinstalling the Langflow container doesn't resolve the issue, you must reset to a fresh installation by removing all OpenRAG containers and data.\nThen, you can retry the 
upgrade."}),"\n",(0,o.jsx)(n.admonition,{type:"warning",children:(0,o.jsxs)(n.p,{children:["This is a destructive operation that destroys your OpenRAG containers and their contents.\nHowever, your ",(0,o.jsx)(n.code,{children:".env"})," file (configuration settings) and ",(0,o.jsx)(n.code,{children:"./opensearch-data"})," (OpenSearch knowledge base) are preserved."]})}),"\n",(0,o.jsxs)(n.p,{children:["To reset your installation, stop your containers, and then completely remove them.\nAfter removing the containers, retry the upgrade in the OpenRAG TUI by clicking ",(0,o.jsx)(n.strong,{children:"Status"})," and then ",(0,o.jsx)(n.strong,{children:"Upgrade"}),"."]}),"\n",(0,o.jsxs)(a.A,{groupId:"Container software",children:[(0,o.jsx)(l.A,{value:"Podman",label:"Podman",children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Stop all running containers:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman stop --all\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all containers, including stopped containers:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman rm --all --force\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all images:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman rmi --all --force\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all volumes:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman volume prune --force\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all networks except the default network:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman network prune --force\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Clean up any leftover data:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"podman system prune --all --force --volumes\n"})}),"\n"]}),"\n"]})}),(0,o.jsx)(l.A,{value:"Docker",label:"Docker",default:!0,children:(0,o.jsxs)(n.ol,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Stop all running containers:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker stop $(docker ps -q)\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all containers, including stopped containers:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker rm --force $(docker ps -aq)\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all images:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker rmi --force $(docker images -q)\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all volumes:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker volume prune --force\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Remove all networks except the default network:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker network prune --force\n"})}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsx)(n.p,{children:"Clean up any leftover 
data:"}),"\n",(0,o.jsx)(n.pre,{children:(0,o.jsx)(n.code,{className:"language-bash",children:"docker system prune --all --force --volumes\n"})}),"\n"]}),"\n"]})})]}),"\n",(0,o.jsx)(n.h2,{id:"reinstalling-openrag-doesnt-reset-onboarding",children:"Reinstalling OpenRAG doesn't reset onboarding"}),"\n",(0,o.jsxs)(n.p,{children:["If you ",(0,o.jsx)(n.a,{href:"/install#reinstall",children:"reinstall OpenRAG"}),", you can restore your installation to it's original, default state by resetting the containers ",(0,o.jsx)(n.em,{children:"and"})," deleting the ",(0,o.jsx)(n.code,{children:".env"})," file."]}),"\n",(0,o.jsx)(n.p,{children:"When you start OpenRAG after doing this, you should be prompted to go through the initial setup and onboarding process again."}),"\n",(0,o.jsxs)(n.p,{children:["Due to a known issue, the onboarding process might not reset when you reinstall OpenRAG.\nIf this occurs, ",(0,o.jsx)(n.a,{href:"/install#install",children:"install OpenRAG in a new Python project directory"})," (with ",(0,o.jsx)(n.code,{children:"uv init"})," and ",(0,o.jsx)(n.code,{children:"uv add openrag"}),")."]}),"\n",(0,o.jsx)(n.h2,{id:"document-ingestion-or-similarity-search-issues",children:"Document ingestion or similarity search issues"}),"\n",(0,o.jsxs)(n.p,{children:["See ",(0,o.jsx)(n.a,{href:"/ingestion#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]})]})}function u(e={}){const{wrapper:n}={...(0,i.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(p,{...e})}):p(e)}}}]); \ No newline at end of file diff --git a/assets/js/runtime~main.f46e5be3.js b/assets/js/runtime~main.a3cb74e9.js similarity index 91% rename from assets/js/runtime~main.f46e5be3.js rename to assets/js/runtime~main.a3cb74e9.js index c9c90e8d..3ec20926 100644 --- a/assets/js/runtime~main.f46e5be3.js +++ b/assets/js/runtime~main.a3cb74e9.js @@ -1 +1 @@ -(()=>{"use strict";var e,a,r,t,o,c={},d={};function f(e){var a=d[e];if(void 0!==a)return a.exports;var r=d[e]={id:e,loaded:!1,exports:{}};return c[e].call(r.exports,r,r.exports,f),r.loaded=!0,r.exports}f.m=c,f.c=d,e=[],f.O=(a,r,t,o)=>{if(!r){var c=1/0;for(i=0;i=o)&&Object.keys(f.O).every(e=>f.O[e](r[n]))?r.splice(n--,1):(d=!1,o0&&e[i-1][2]>o;i--)e[i]=e[i-1];e[i]=[r,t,o]},f.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return f.d(a,{a:a}),a},r=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,f.t=function(e,t){if(1&t&&(e=this(e)),8&t)return e;if("object"==typeof e&&e){if(4&t&&e.__esModule)return e;if(16&t&&"function"==typeof e.then)return e}var o=Object.create(null);f.r(o);var c={};a=a||[null,r({}),r([]),r(r)];for(var d=2&t&&e;("object"==typeof d||"function"==typeof d)&&!~a.indexOf(d);d=r(d))Object.getOwnPropertyNames(d).forEach(a=>c[a]=()=>e[a]);return c.default=()=>e,f.d(o,c),o},f.d=(e,a)=>{for(var r in 
a)f.o(a,r)&&!f.o(e,r)&&Object.defineProperty(e,r,{enumerable:!0,get:a[r]})},f.f={},f.e=e=>Promise.all(Object.keys(f.f).reduce((a,r)=>(f.f[r](e,a),a),[])),f.u=e=>"assets/js/"+({98:"af920ffe",571:"0ba6a408",1567:"22dd74f7",2076:"common",2272:"749371cc",2668:"eb5b356a",3207:"27b4a875",5490:"71478a5d",5742:"aba21aa0",5750:"d0314b07",6919:"ca2c3c0c",7098:"a7bd4aaa",8401:"17896441",9026:"c8078f0a",9048:"a94703ab",9172:"e633a5ea",9532:"33362219",9647:"5e95c892"}[e]||e)+"."+{98:"f3ba20bd",165:"7b1d067d",291:"1b64972f",571:"dd2bbad9",617:"e8fa27b8",1e3:"f9af7a41",1203:"b618bb61",1567:"bcf46a1b",1741:"b927934f",1746:"c8a2211b",2076:"6bf331d5",2130:"09232a19",2237:"70c48bab",2272:"ed259beb",2279:"12a16213",2291:"2c483c10",2325:"7c4239a7",2334:"daa16321",2492:"e6c78669",2668:"6122add2",2821:"e806cb03",3207:"565e61ee",3490:"026802f4",3815:"f87fc96a",4250:"77e8e6c6",4616:"b519f330",4802:"a8b2857c",4981:"61cf4b0a",5480:"cd9ee90b",5490:"7641035a",5742:"2f625fe1",5750:"86b4cd81",5901:"f6ccb00b",5955:"14464ff3",5996:"16c59d9f",6241:"11caa48e",6319:"4754ec94",6366:"0c77f825",6567:"b1623b8d",6919:"04fbbb9d",6992:"3c4d161b",7098:"9fea9356",7465:"7e0b8008",7592:"d6165eff",7873:"547873d3",7928:"5f633e47",8142:"5edfc34d",8249:"a2dd33e2",8401:"0d444abe",8565:"b7153d5a",8756:"704db1d5",9026:"7626c69e",9032:"67a3e002",9048:"fa9eaf65",9172:"f5bfbf12",9412:"d4dba7ab",9510:"81e2df6a",9532:"2a31613f",9647:"d28d5273"}[e]+".js",f.miniCssF=e=>{},f.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),t={},o="openrag-docs:",f.l=(e,a,r,c)=>{if(t[e])t[e].push(a);else{var d,n;if(void 0!==r)for(var b=document.getElementsByTagName("script"),i=0;i{d.onerror=d.onload=null,clearTimeout(s);var o=t[e];if(delete t[e],d.parentNode&&d.parentNode.removeChild(d),o&&o.forEach(e=>e(r)),a)return a(r)},s=setTimeout(u.bind(null,void 0,{type:"timeout",target:d}),12e4);d.onerror=u.bind(null,d.onerror),d.onload=u.bind(null,d.onload),n&&document.head.appendChild(d)}},f.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},f.p="/",f.gca=function(e){return e={17896441:"8401",33362219:"9532",af920ffe:"98","0ba6a408":"571","22dd74f7":"1567",common:"2076","749371cc":"2272",eb5b356a:"2668","27b4a875":"3207","71478a5d":"5490",aba21aa0:"5742",d0314b07:"5750",ca2c3c0c:"6919",a7bd4aaa:"7098",c8078f0a:"9026",a94703ab:"9048",e633a5ea:"9172","5e95c892":"9647"}[e]||e,f.p+f.u(e)},(()=>{var e={5354:0,1869:0};f.f.j=(a,r)=>{var t=f.o(e,a)?e[a]:void 0;if(0!==t)if(t)r.push(t[2]);else if(/^(1869|5354)$/.test(a))e[a]=0;else{var o=new Promise((r,o)=>t=e[a]=[r,o]);r.push(t[2]=o);var c=f.p+f.u(a),d=new Error;f.l(c,r=>{if(f.o(e,a)&&(0!==(t=e[a])&&(e[a]=void 0),t)){var o=r&&("load"===r.type?"missing":r.type),c=r&&r.target&&r.target.src;d.message="Loading chunk "+a+" failed.\n("+o+": "+c+")",d.name="ChunkLoadError",d.type=o,d.request=c,t[1](d)}},"chunk-"+a,a)}},f.O.j=a=>0===e[a];var a=(a,r)=>{var t,o,[c,d,n]=r,b=0;if(c.some(a=>0!==e[a])){for(t in d)f.o(d,t)&&(f.m[t]=d[t]);if(n)var i=n(f)}for(a&&a(r);b{"use strict";var e,a,r,t,o,c={},d={};function f(e){var a=d[e];if(void 0!==a)return a.exports;var r=d[e]={id:e,loaded:!1,exports:{}};return c[e].call(r.exports,r,r.exports,f),r.loaded=!0,r.exports}f.m=c,f.c=d,e=[],f.O=(a,r,t,o)=>{if(!r){var c=1/0;for(i=0;i=o)&&Object.keys(f.O).every(e=>f.O[e](r[n]))?r.splice(n--,1):(d=!1,o0&&e[i-1][2]>o;i--)e[i]=e[i-1];e[i]=[r,t,o]},f.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return 
f.d(a,{a:a}),a},r=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,f.t=function(e,t){if(1&t&&(e=this(e)),8&t)return e;if("object"==typeof e&&e){if(4&t&&e.__esModule)return e;if(16&t&&"function"==typeof e.then)return e}var o=Object.create(null);f.r(o);var c={};a=a||[null,r({}),r([]),r(r)];for(var d=2&t&&e;("object"==typeof d||"function"==typeof d)&&!~a.indexOf(d);d=r(d))Object.getOwnPropertyNames(d).forEach(a=>c[a]=()=>e[a]);return c.default=()=>e,f.d(o,c),o},f.d=(e,a)=>{for(var r in a)f.o(a,r)&&!f.o(e,r)&&Object.defineProperty(e,r,{enumerable:!0,get:a[r]})},f.f={},f.e=e=>Promise.all(Object.keys(f.f).reduce((a,r)=>(f.f[r](e,a),a),[])),f.u=e=>"assets/js/"+({98:"af920ffe",571:"0ba6a408",1567:"22dd74f7",2076:"common",2272:"749371cc",2668:"eb5b356a",3207:"27b4a875",5490:"71478a5d",5742:"aba21aa0",5750:"d0314b07",6919:"ca2c3c0c",7098:"a7bd4aaa",8401:"17896441",9026:"c8078f0a",9048:"a94703ab",9172:"e633a5ea",9532:"33362219",9647:"5e95c892"}[e]||e)+"."+{98:"f3ba20bd",165:"7b1d067d",291:"1b64972f",571:"eddb3e7e",617:"e8fa27b8",1e3:"f9af7a41",1203:"b618bb61",1567:"bcf46a1b",1741:"b927934f",1746:"c8a2211b",2076:"6bf331d5",2130:"09232a19",2237:"70c48bab",2272:"7bf165c7",2279:"12a16213",2291:"2c483c10",2325:"7c4239a7",2334:"daa16321",2492:"e6c78669",2668:"285e3b8b",2821:"e806cb03",3207:"8de9f81c",3490:"026802f4",3815:"f87fc96a",4250:"77e8e6c6",4616:"b519f330",4802:"a8b2857c",4981:"61cf4b0a",5480:"cd9ee90b",5490:"57b0036a",5742:"2f625fe1",5750:"26db9c14",5901:"f6ccb00b",5955:"14464ff3",5996:"16c59d9f",6241:"11caa48e",6319:"4754ec94",6366:"0c77f825",6567:"b1623b8d",6919:"7114a4c2",6992:"3c4d161b",7098:"9fea9356",7465:"7e0b8008",7592:"d6165eff",7873:"547873d3",7928:"5f633e47",8142:"5edfc34d",8249:"a2dd33e2",8401:"0d444abe",8565:"b7153d5a",8756:"704db1d5",9026:"7626c69e",9032:"67a3e002",9048:"fa9eaf65",9172:"f5bfbf12",9412:"d4dba7ab",9510:"81e2df6a",9532:"2a31613f",9647:"d28d5273"}[e]+".js",f.miniCssF=e=>{},f.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),t={},o="openrag-docs:",f.l=(e,a,r,c)=>{if(t[e])t[e].push(a);else{var d,n;if(void 0!==r)for(var b=document.getElementsByTagName("script"),i=0;i{d.onerror=d.onload=null,clearTimeout(s);var o=t[e];if(delete t[e],d.parentNode&&d.parentNode.removeChild(d),o&&o.forEach(e=>e(r)),a)return a(r)},s=setTimeout(u.bind(null,void 0,{type:"timeout",target:d}),12e4);d.onerror=u.bind(null,d.onerror),d.onload=u.bind(null,d.onload),n&&document.head.appendChild(d)}},f.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},f.p="/",f.gca=function(e){return e={17896441:"8401",33362219:"9532",af920ffe:"98","0ba6a408":"571","22dd74f7":"1567",common:"2076","749371cc":"2272",eb5b356a:"2668","27b4a875":"3207","71478a5d":"5490",aba21aa0:"5742",d0314b07:"5750",ca2c3c0c:"6919",a7bd4aaa:"7098",c8078f0a:"9026",a94703ab:"9048",e633a5ea:"9172","5e95c892":"9647"}[e]||e,f.p+f.u(e)},(()=>{var e={5354:0,1869:0};f.f.j=(a,r)=>{var t=f.o(e,a)?e[a]:void 0;if(0!==t)if(t)r.push(t[2]);else if(/^(1869|5354)$/.test(a))e[a]=0;else{var o=new Promise((r,o)=>t=e[a]=[r,o]);r.push(t[2]=o);var c=f.p+f.u(a),d=new Error;f.l(c,r=>{if(f.o(e,a)&&(0!==(t=e[a])&&(e[a]=void 0),t)){var o=r&&("load"===r.type?"missing":r.type),c=r&&r.target&&r.target.src;d.message="Loading chunk "+a+" failed.\n("+o+": "+c+")",d.name="ChunkLoadError",d.type=o,d.request=c,t[1](d)}},"chunk-"+a,a)}},f.O.j=a=>0===e[a];var a=(a,r)=>{var t,o,[c,d,n]=r,b=0;if(c.some(a=>0!==e[a])){for(t in 
d)f.o(d,t)&&(f.m[t]=d[t]);if(n)var i=n(f)}for(a&&a(r);b Chat in OpenRAG | OpenRAG - + @@ -12,10 +12,16 @@

Chat in OpenRAG

After you upload documents to your knowledge base, you can use the OpenRAG Chat feature to interact with your knowledge through natural language queries.

+

The OpenRAG Chat uses an LLM-powered agent to understand your queries, retrieve relevant information from your knowledge base, and generate context-aware responses. +The agent can also fetch information from URLs and new documents that you provide during the chat session. +To limit the knowledge available to the agent, use filters.

+

The agent can call specialized Model Context Protocol (MCP) tools to extend its capabilities. +To add or change the available tools, you must edit the OpenRAG OpenSearch Agent flow.

tip

Try chatting, uploading documents, and modifying chat settings in the quickstart.

OpenRAG OpenSearch Agent flow

-

When you use the OpenRAG Chat, the OpenRAG OpenSearch Agent flow runs in the background to retrieve relevant information from your knowledge base and generate a response.

-

If you inspect the flow in Langflow, you'll see that it is comprised of eight components that work together to ingest chat messages, retrieve relevant information from your knowledge base, and then generate responses.

+

When you use the OpenRAG Chat, the OpenRAG OpenSearch Agent flow runs in the background to retrieve relevant information from your knowledge base and generate a response.

+

If you inspect the flow in Langflow, you'll see that it is composed of eight components that work together to ingest chat messages, retrieve relevant information from your knowledge base, and then generate responses. +You can edit these components to customize the agent's behavior.

OpenRAG OpenSearch Agent Flow

  • @@ -34,7 +40,7 @@ One or more specialized tools can be attached to the Tools port

    Different models can change the style and content of the agent's responses, and some models might be better suited for certain tasks than others. If the agent doesn't seem to be handling requests well, try changing the model to see how the responses change. For example, fast models might be good for simple queries, but they might not have the depth of reasoning for complex, multi-faceted queries.

  • -

    MCP Tools component: Connected to the Agent component's Tools port, this component can be used to access any Model Context Protocol (MCP) server and the MCP tools provided by that server. In this case, your OpenRAG Langflow instance's Starter Project is the MCP server, and the OpenSearch URL Ingestion flow is the MCP tool. +

    MCP Tools component: Connected to the Agent component's Tools port, this component can be used to access any MCP server and the MCP tools provided by that server. In this case, your OpenRAG Langflow instance's Starter Project is the MCP server, and the OpenSearch URL Ingestion flow is the MCP tool. This flow fetches content from URLs, and then stores the content in your OpenRAG OpenSearch knowledge base. By serving this flow as an MCP tool, the agent can selectively call this tool if a URL is detected in the chat input.

  • diff --git a/docker/index.html b/docker/index.html index c673e98a..eed78337 100644 --- a/docker/index.html +++ b/docker/index.html @@ -4,7 +4,7 @@ Install OpenRAG containers | OpenRAG - + @@ -213,15 +213,141 @@ These commands are also available in the TUI's Upgrade containers

    Upgrade your containers to the latest version while preserving your data.

    docker compose pull
    docker compose up -d --force-recreate
    -

    Rebuild containers (destructive)

    -

    Reset state by rebuilding all of your containers. -Your OpenSearch and Langflow databases will be lost. -Documents stored in the ./openrag-documents directory will persist, since the directory is mounted as a volume in the OpenRAG backend container.

    +

    Reset containers (destructive)

    +
    warning

    These are destructive operations that reset your OpenRAG deployment to an initial state. +Be aware that data is lost and cannot be recovered after running these commands.

    +
      +
    • +

      Rebuild containers: This command destroys and recreates the containers. Data stored only in the containers, such as Langflow flows, is lost. +The .env file, config directory, ./openrag-documents directory, ./opensearch-data directory, and the conversations.json file are preserved.

      docker compose up --build --force-recreate --remove-orphans
      -

      Remove all containers and data (destructive)

      -

      Completely remove your OpenRAG installation and delete all data. -This deletes all of your data, including OpenSearch data, uploaded documents, and authentication.

      -
      docker compose down --volumes --remove-orphans --rmi local
      docker system prune -f
+

After resetting your containers, you must repeat application onboarding.
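Before repeating onboarding, you can optionally confirm that the removal completed. This check is an assumption rather than part of the documented procedure; it simply lists any containers and volumes that remain:

    docker ps -a
    docker volume ls

If any OpenRAG containers or volumes are still listed, rerun the removal commands above before you reinstall.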

diff --git a/index.html b/index.html index d7c89184..ea9dd7d8 100644 --- a/index.html +++ b/index.html @@ -4,7 +4,7 @@ What is OpenRAG? | OpenRAG - + diff --git a/ingestion/index.html b/ingestion/index.html index 466082b8..34617bdd 100644 --- a/ingestion/index.html +++ b/ingestion/index.html @@ -4,7 +4,7 @@ Ingest knowledge | OpenRAG - + @@ -265,7 +265,7 @@ All errors were file-specific, and they didn't stop the pipeline.

  • Split excessively large files into smaller files before uploading. For one way to split a plain-text file, see the sketch after this list.
  • Remove unusual embedded content, such as videos or animations, before uploading. Although Docling can replace some non-text content with placeholders during ingestion, some embedded content might cause errors.
  • -

    If the OpenRAG Chat doesn't seem to use your documents correctly, browse your knowledge base to confirm that the documents are uploaded in full, and the chunks are correct.

    +

    If the OpenRAG Chat doesn't seem to use your documents correctly, browse your knowledge base to confirm that the documents are uploaded in full, and the chunks are correct.

    If the documents are present and well-formed, check your knowledge filters. If a global filter is applied, make sure the expected documents are included in the global filter. If the global filter excludes any documents, the agent cannot access those documents unless you apply a chat-level filter or change the global filter.
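    As a concrete example of the first item in this list, the following sketch splits an oversized plain-text file into smaller pieces before uploading. The split utility and the 5,000-line chunk size are assumptions, not OpenRAG requirements, and binary formats such as PDFs need a format-aware splitting tool instead:

      split -l 5000 large-export.txt large-export-part-
      ls large-export-part-*

    Each resulting part can then be uploaded as a separate document.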

    diff --git a/install/index.html b/install/index.html index 7b563e55..3664763d 100644 --- a/install/index.html +++ b/install/index.html @@ -4,7 +4,7 @@ Install OpenRAG with TUI | OpenRAG - + @@ -337,23 +337,35 @@ To copy the logs, click Copy to Clipboard.

    Upgrade: Check for updates. For more information, see upgrade OpenRAG.

  • -

    Reset: This is a destructive action that resets your containers.

    +

    Factory Reset: This is a destructive action that resets your containers.

  • -

    Native services: From the Status menu, you can view the status, port, and process ID (PID) of the OpenRAG native services. -You can also click Stop or Restart to stop and start OpenRAG native services.

    -

    A native service in OpenRAG is a service that runs locally on your machine, not within a container. For example, the docling serve process is an OpenRAG native service because this document processing service runs on your local machine, separate from the OpenRAG containers.

    +

    Native services: View and manage OpenRAG services that run directly on your local machine instead of a container.

  • Reset containers

    -
    warning

    This is a destructive action that destroys and recreates all of your OpenRAG containers.

    -

    To destroy and recreate your OpenRAG containers, go to the TUI Status menu, and then click Reset.

    -

    The Reset function runs two commands. First, it stops and removes all containers, volumes, and local images:

    -
    docker compose down --volumes --remove-orphans --rmi local
    -

    Then, it removes any additional Docker objects with docker system prune -f.

    -

    If you reset your containers as part of reinstalling OpenRAG, continue the reinstallation process after resetting the containers.

    +

    Reset your OpenRAG deployment by recreating the containers and removing some related data.

    +
    warning

    This is a destructive action that destroys the following:

      +
    • All OpenRAG containers, volumes, and local images
    • +
    • Any additional Docker objects
    • +
    • The contents of OpenRAG's config and ./opensearch-data directories
    • +
    • The conversations.json file
    • +

    This operation doesn't remove the .env file or the contents of the ./openrag-documents directory.
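    A quick way to confirm what survives the reset (illustrative, not a documented step) is to list the preserved items:

      ls -ld .env ./openrag-documents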

    +
      +
    1. +

      To destroy and recreate your OpenRAG containers, go to the TUI Status menu, and then click Factory Reset.

      +

      This function runs the following commands and deletes the contents of OpenRAG's config and ./opensearch-data directories.

      +
      docker compose down --volumes --remove-orphans --rmi local
      docker system prune -f
      +
    2. +
    3. +

      If you reset your containers as part of reinstalling OpenRAG, continue the reinstallation process after resetting the containers. +Otherwise, in the TUI Setup menu, repeat the setup process to start the services and launch the OpenRAG app. Your OpenRAG passwords, OAuth credentials (if previously set), and onboarding configuration are restored from the .env file.

      +
    4. +

    Start all services

    -

    On the TUI main page, click Start All Services to start the OpenRAG containers and launch OpenRAG itself.

    +

    Through the TUI, you can view and manage OpenRAG services that run in containers and directly on your local machine.

    +

    Start containers

    +

    On the TUI main page or the Setup menu, click Start All Services to start the OpenRAG containers and launch OpenRAG itself.
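    Before starting the services, you can optionally check that the ports OpenRAG uses (3000, 7860, 8000, 9200, and 5601, as listed in the troubleshooting guide) aren't already taken. This loop is a sketch, not a documented OpenRAG command:

      for port in 3000 7860 8000 9200 5601; do
        if lsof -nP -iTCP:"$port" -sTCP:LISTEN >/dev/null; then
          echo "Port $port is already in use"
        else
          echo "Port $port is free"
        fi
      done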

    When you start all services, the following processes happen:

    1. @@ -366,6 +378,10 @@ You can also click Stop or Restart to stop and

      OpenRAG deploys the containers with docker compose up -d.

    +

    Start native services (Docling)

    +

    A native service in OpenRAG is a service that runs locally on your machine, not within a container. For example, the docling serve process is an OpenRAG native service because this document processing service runs on your local machine, separate from the OpenRAG containers.

    +

    From the Status menu, you can view the status, port, and process ID (PID) of the OpenRAG native services. +You can also click Stop or Restart to stop and start OpenRAG native services.
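    If you prefer to check the Docling native service from a shell instead of the TUI, the following sketch (an assumption, not a documented command) looks for a running docling serve process and any port it is listening on:

      pgrep -fl "docling serve"
      lsof -nP -iTCP -sTCP:LISTEN | grep -i docling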

    Upgrade OpenRAG

    To upgrade OpenRAG, upgrade the OpenRAG Python package, and then upgrade the OpenRAG containers.

    This is a two-part process because upgrading the OpenRAG Python package updates the TUI and Python code, but the container versions are controlled by environment variables in your .env file.
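    As a concrete sketch of the two parts, the commands below are assumptions rather than the documented procedure; the package-upgrade command in particular depends on how you installed OpenRAG:

      # Part 1: upgrade the OpenRAG Python package (here, for a uv-managed project).
      uv lock --upgrade-package openrag && uv sync
      # Part 2: upgrade the containers while preserving your data.
      docker compose pull
      docker compose up -d --force-recreate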

    @@ -432,30 +448,29 @@ The TUI warns you if it detects a version mismatch.

    Reinstall OpenRAG

    -

    To reinstall OpenRAG with a completely fresh setup:

    +

    Reset your OpenRAG deployment by recreating the containers and, optionally, removing related data:

    1. -

      In the TUI Status menu, reset your containers to destroy the existing OpenRAG containers and their data.

      +

      In the TUI, reset your containers to destroy the following:

      +
        +
      • All existing OpenRAG containers, volumes, and local images
      • +
      • Any additional Docker objects
      • +
      • The contents of OpenRAG's config and ./opensearch-data directories
      • +
      • The conversations.json file
      • +
    2. -

      Optional: Delete your project's .env file.

      -

      The Reset operation doesn't remove your project's .env file, so your passwords, API keys, and OAuth settings can be preserved. -If you delete the .env file, you must run the Set up OpenRAG with the TUI process again to create a new configuration file.

      +

      Optional: Remove data that wasn't deleted by the Factory Reset operation. For a completely fresh installation, delete all of this data.

      +
        +
      • OpenRAG's .env file: Contains your OpenRAG configuration, including OpenRAG passwords, API keys, OAuth settings, and other environment variables. If you delete this file, you must either repeat the setup process to create a new .env file, or add a populated .env file to your OpenRAG installation directory before restarting OpenRAG. An illustrative .env fragment follows this list.
      • +
      • The contents of the ./openrag-documents directory: Contains documents that you uploaded to OpenRAG. Delete these files to prevent documents from being reingested to your knowledge base after restarting OpenRAG. However, you might want to preserve OpenRAG's default documents.
      • +
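      For reference, a populated .env file looks something like the fragment below. Only OPENSEARCH_PASSWORD and LANGFLOW_SUPERUSER are variable names mentioned elsewhere in these docs; the values, and any other keys your installation needs, are placeholders, so treat the Environment variables reference as the source of truth:

        OPENSEARCH_PASSWORD=A_Str0ng!Example_Passw0rd
        LANGFLOW_SUPERUSER=admin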
    3. -

      Optional: Delete your OpenSearch knowledge base by deleting the contents of the ./opensearch-data folder in your OpenRAG installation directory.

      +

      In the TUI Setup menu, repeat the setup process to configure OpenRAG, restart the services, and launch the OpenRAG app, and then repeat application onboarding. +If OpenRAG detects a .env file, it automatically populates any OpenRAG passwords, OAuth credentials, and onboarding configuration set in that file.

    4. -
    5. -

      In the TUI Setup menu, repeat the Basic Setup process:

      -
        -
      1. Click Start All Services to pull container images and start them.
      2. -
      3. Under Native Services, click Start to start the Docling service.
      4. -
      5. Click Open App to open the OpenRAG application.
      6. -
      7. Continue with application onboarding.
      8. -
      -
    6. -
    -

    If reinstalling OpenRAG and deleting the .env file doesn't reset setup or onboarding, see Reinstalling OpenRAG doesn't reset onboarding.