openrag/assets/js/749371cc.bcff9550.js

"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[2272],{309:(e,n,r)=>{r.d(n,{Ay:()=>l,RM:()=>o});var s=r(4848),t=r(8453);const o=[];function i(e){const n={a:"a",code:"code",li:"li",p:"p",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Install ",(0,s.jsx)(n.a,{href:"https://docs.astral.sh/uv/getting-started/installation/",children:"uv"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Install ",(0,s.jsx)(n.a,{href:"https://podman.io/docs/installation",children:"Podman"})," (recommended) or ",(0,s.jsx)(n.a,{href:"https://docs.docker.com/get-docker/",children:"Docker"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["The OpenRAG team recommends, at minimum, 8 GB of RAM for container VMs.\nHowever, if you plan to upload large files regularly, more RAM is recommended.\nFor more information, see ",(0,s.jsx)(n.a,{href:"/support/troubleshoot",children:"Troubleshoot OpenRAG"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Install ",(0,s.jsx)(n.a,{href:"https://docs.podman.io/en/latest/markdown/podman-compose.1.html",children:(0,s.jsx)(n.code,{children:"podman-compose"})})," or ",(0,s.jsx)(n.a,{href:"https://docs.docker.com/compose/install/",children:"Docker Compose"}),".\nTo use Docker Compose with Podman, you must alias Docker Compose commands to Podman commands."]}),"\n"]}),"\n"]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},887:(e,n,r)=>{r.d(n,{Ay:()=>h,RM:()=>c});var s=r(4848),t=r(8453),o=r(9179),i=r(1470),l=r(9365),a=r(3059);const c=[{value:"Complete the application onboarding process",id:"application-onboarding",level:2},...a.RM];function d(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",strong:"strong",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.h2,{id:"application-onboarding",children:"Complete the application onboarding process"}),"\n",(0,s.jsxs)(n.p,{children:["The first time you start the OpenRAG application, you must complete the application onboarding process to select language and embedding models that are essential for OpenRAG features like the ",(0,s.jsx)(n.a,{href:"/chat",children:(0,s.jsx)(n.strong,{children:"Chat"})}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["Some of these variables, such as the embedding models, can be changed seamlessly after onboarding.\nOthers are immutable and require you to destroy and recreate the OpenRAG containers.\nFor more information, see the ",(0,s.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG environment variables reference"}),"."]}),"\n",(0,s.jsx)(n.p,{children:"You can use different providers for your language model and embedding model, such as Anthropic for the language model and OpenAI for the embedding model.\nAdditionally, you can set multiple embedding models."}),"\n",(0,s.jsx)(n.p,{children:"You only need to complete onboarding for your preferred providers."}),"\n",(0,s.jsxs)(i.A,{groupId:"Provider",children:[(0,s.jsxs)(l.A,{value:"Anthropic",label:"Anthropic",default:!0,children:[(0,s.jsx)(n.admonition,{type:"info",children:(0,s.jsx)(n.p,{children:"Anthropic doesn't provide embedding models. 
If you select Anthropic for your language model, you must select a different provider for the embedding model."})}),(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Enter your Anthropic API key, or enable ",(0,s.jsx)(n.strong,{children:"Use environment API key"})," to pull the key from your ",(0,s.jsxs)(n.a,{href:"/reference/configuration",children:["OpenRAG ",(0,s.jsx)(n.code,{children:".env"})," file"]}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Under ",(0,s.jsx)(n.strong,{children:"Advanced settings"}),", select the language model that you want to use."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Select a provider for embeddings, provide the required information, and then select the embedding model you want to use.\nFor information about another provider's credentials and settings, see the instructions for that provider."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["After you configure the embedding model, OpenRAG uses your credentials and models to ingest some ",(0,s.jsx)(n.a,{href:"/knowledge#default-documents",children:"initial documents"}),". This tests the connection, and it allows you to ask OpenRAG about itself in the ",(0,s.jsx)(n.a,{href:"/chat",children:(0,s.jsx)(n.strong,{children:"Chat"})}),".\nIf there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.\nVerify that the credential is valid and has access to the selected model, and then click ",(0,s.jsx)(n.strong,{children:"Complete"})," to retry ingestion."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Continue through the overview slides for a brief introduction to OpenRAG, or click ",(0,s.jsx)(o.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Skip overview"}),".\nThe overview demonstrates some basic functionality that is covered in the ",(0,s.jsx)(n.a,{href:"/quickstart#chat-with-documents",children:"quickstart"})," and in other parts of the OpenRAG documentation."]}),"\n"]}),"\n"]})]}),(0,s.jsx)(l.A,{value:"IBM watsonx.ai",label:"IBM watsonx.ai",children:(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["For ",(0,s.jsx)(n.strong,{children:"watsonx.ai API Endpoint"}),", select the base URL for your watsonx.ai model deployment."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Enter your watsonx.ai deployment's project ID and API key."}),"\n",(0,s.jsxs)(n.p,{children:["You can enable ",(0,s.jsx)(n.strong,{children:"Use environment API key"})," to pull the key from your ",(0,s.jsxs)(n.a,{href:"/reference/configuration",children:["OpenRAG ",(0,s.jsx)(n.code,{children:".env"})," file"]}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Under ",(0,s.jsx)(n.strong,{children:"Advanced settings"}),", select the language model that you want to use."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Select a provider for embeddings, provide the required information, and then select the embedding model you want to 
use.\nFor information about another provider's credentials and settings, see the instructions for that provider."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["After you configure the embedding model, OpenRAG uses your credentials and models to ingest some ",(0,s.jsx)(n.a,{href:"/knowledge#default-documents",children:"initial documents"}),". This tests the connection, and it allows you to ask OpenRAG about itself in the ",(0,s.jsx)(n.a,{href:"/chat",children:(0,s.jsx)(n.strong,{children:"Chat"})}),".\nIf there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.\nVerify that the credentials are valid and have access to the selected model, and then click ",(0,s.jsx)(n.strong,{children:"Complete"})," to retry ingestion."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Continue through the overview slides for a brief introduction to OpenRAG, or click ",(0,s.jsx)(o.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Skip overview"}),".\nThe overview demonstrates some basic functionality that is covered in the ",(0,s.jsx)(n.a,{href:"/quickstart#chat-with-documents",children:"quickstart"})," and in other parts of the OpenRAG documentation."]}),"\n"]}),"\n"]})}),(0,s.jsxs)(l.A,{value:"Ollama",label:"Ollama",children:[(0,s.jsx)(n.p,{children:"Using Ollama as your language and embedding model provider offers greater flexibility and configuration options for hosting models.\nHowever, it requires additional setup because Ollama isn't included with OpenRAG.\nYou must deploy Ollama separately if you want to use Ollama as a model provider."}),(0,s.jsx)(n.admonition,{type:"info",children:(0,s.jsx)(a.Ay,{})}),(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Install Ollama locally or on a remote server"}),", or ",(0,s.jsx)(n.a,{href:"https://docs.ollama.com/cloud",children:"run models in Ollama Cloud"}),"."]}),"\n",(0,s.jsx)(n.p,{children:"If you are running a remote server, it must be accessible from your OpenRAG deployment."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"In the OpenRAG onboarding dialog, enter your Ollama server's base URL:"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Local Ollama server"}),": Enter your Ollama server's base URL and port. The default Ollama server address is ",(0,s.jsx)(n.code,{children:"http://localhost:11434"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Ollama Cloud"}),": Because Ollama Cloud models run at the same address as a local Ollama server and automatically offload to Ollama's cloud service, you can use the same base URL and port as you would for a local Ollama server. 
The default address is ",(0,s.jsx)(n.code,{children:"http://localhost:11434"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Remote server"}),": Enter your remote Ollama server's base URL and port, such as ",(0,s.jsx)(n.code,{children:"http://your-remote-server:11434"}),"."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Select the language model that your Ollama server is running."}),"\n",(0,s.jsx)(n.p,{children:"If your server isn't running any language models, you must either deploy a language model on your Ollama server, or use another provider for the language model."}),"\n",(0,s.jsx)(n.p,{children:"Language model and embedding model selections are independent.\nYou can use the same or different servers for each model."}),"\n",(0,s.jsx)(n.p,{children:"To use different providers for each model, you must configure both providers, and select the relevant model for each provider."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Select a provider for embeddings, provide the required information, and then select the embedding model you want to use.\nFor information about another provider's credentials and settings, see the instructions for that provider."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["After you configure the embedding model, OpenRAG uses your credentials and models to ingest some ",(0,s.jsx)(n.a,{href:"/knowledge#default-documents",children:"initial documents"}),". This tests the connection, and it allows you to ask OpenRAG about itself in the ",(0,s.jsx)(n.a,{href:"/chat",children:(0,s.jsx)(n.strong,{children:"Chat"})}),".\nIf there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.\nVerify that the server address is valid, and that the selected model is running on the server.\nThen, click ",(0,s.jsx)(n.strong,{children:"Complete"})," to retry ingestion."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Continue through the overview slides for a brief introduction to OpenRAG, or click ",(0,s.jsx)(o.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Skip overview"}),".\nThe overview demonstrates some basic functionality that is covered in the ",(0,s.jsx)(n.a,{href:"/quickstart#chat-with-documents",children:"quickstart"})," and in other parts of the OpenRAG documentation."]}),"\n"]}),"\n"]})]}),(0,s.jsx)(l.A,{value:"OpenAI",label:"OpenAI (default)",children:(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Enter your OpenAI API key, or enable ",(0,s.jsx)(n.strong,{children:"Use environment API key"})," to pull the key from your ",(0,s.jsxs)(n.a,{href:"/reference/configuration",children:["OpenRAG ",(0,s.jsx)(n.code,{children:".env"})," file"]}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Under ",(0,s.jsx)(n.strong,{children:"Advanced settings"}),", select the language model that you want to use."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Select a provider for embeddings, provide the required 
information, and then select the embedding model you want to use.\nFor information about another provider's credentials and settings, see the instructions for that provider."}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Complete"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["After you configure the embedding model, OpenRAG uses your credentials and models to ingest some ",(0,s.jsx)(n.a,{href:"/knowledge#default-documents",children:"initial documents"}),". This tests the connection, and it allows you to ask OpenRAG about itself in the ",(0,s.jsx)(n.a,{href:"/chat",children:(0,s.jsx)(n.strong,{children:"Chat"})}),".\nIf there is a problem with the model configuration, an error occurs and you are redirected back to the application onboarding screen.\nVerify that the credential is valid and has access to the selected model, and then click ",(0,s.jsx)(n.strong,{children:"Complete"})," to retry ingestion."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Continue through the overview slides for a brief introduction to OpenRAG, or click ",(0,s.jsx)(o.A,{name:"ArrowRight","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Skip overview"}),".\nThe overview demonstrates some basic functionality that is covered in the ",(0,s.jsx)(n.a,{href:"/quickstart#chat-with-documents",children:"quickstart"})," and in other parts of the OpenRAG documentation."]}),"\n"]}),"\n"]})})]})]})}function h(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(d,{...e})}):d(e)}},927:(e,n,r)=>{r.d(n,{Ay:()=>l,RM:()=>o});var s=r(4848),t=r(8453);const o=[{value:"Next steps",id:"next-steps",level:2}];function i(e){const n={a:"a",h2:"h2",li:"li",strong:"strong",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.h2,{id:"next-steps",children:"Next steps"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["Try some of OpenRAG's core features in the ",(0,s.jsx)(n.a,{href:"/quickstart#chat-with-documents",children:"quickstart"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Learn how to ",(0,s.jsx)(n.a,{href:"/manage-services",children:"manage OpenRAG services"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.a,{href:"/ingestion",children:"Upload documents"}),", and then use the ",(0,s.jsx)(n.a,{href:"/chat",children:(0,s.jsx)(n.strong,{children:"Chat"})})," to explore your data."]}),"\n"]})]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},1470:(e,n,r)=>{r.d(n,{A:()=>w});var s=r(6540),t=r(4164),o=r(7559),i=r(3104),l=r(6347),a=r(205),c=r(7485),d=r(1682),h=r(679);function u(e){return s.Children.toArray(e).filter(e=>"\n"!==e).map(e=>{if(!e||(0,s.isValidElement)(e)&&function(e){const{props:n}=e;return!!n&&"object"==typeof n&&"value"in n}(e))return e;throw new Error(`Docusaurus error: Bad <Tabs> child <${"string"==typeof e.type?e.type:e.type.name}>: all children of the <Tabs> component should be <TabItem>, and every <TabItem> should have a unique "value" prop.`)})?.filter(Boolean)??[]}function p(e){const{values:n,children:r}=e;return(0,s.useMemo)(()=>{const e=n??function(e){return u(e).map(({props:{value:e,label:n,attributes:r,default:s}})=>({value:e,label:n,attributes:r,default:s}))}(r);return function(e){const n=(0,d.XI)(e,(e,n)=>e.value===n.value);if(n.length>0)throw new Error(`Docusaurus error: Duplicate values "${n.map(e=>e.value).join(", ")}" found in <Tabs>. 
Every value needs to be unique.`)}(e),e},[n,r])}function m({value:e,tabValues:n}){return n.some(n=>n.value===e)}function x({queryString:e=!1,groupId:n}){const r=(0,l.W6)(),t=function({queryString:e=!1,groupId:n}){if("string"==typeof e)return e;if(!1===e)return null;if(!0===e&&!n)throw new Error('Docusaurus error: The <Tabs> component groupId prop is required if queryString=true, because this value is used as the search param name. You can also provide an explicit value such as queryString="my-search-param".');return n??null}({queryString:e,groupId:n});return[(0,c.aZ)(t),(0,s.useCallback)(e=>{if(!t)return;const n=new URLSearchParams(r.location.search);n.set(t,e),r.replace({...r.location,search:n.toString()})},[t,r])]}function j(e){const{defaultValue:n,queryString:r=!1,groupId:t}=e,o=p(e),[i,l]=(0,s.useState)(()=>function({defaultValue:e,tabValues:n}){if(0===n.length)throw new Error("Docusaurus error: the <Tabs> component requires at least one <TabItem> children component");if(e){if(!m({value:e,tabValues:n}))throw new Error(`Docusaurus error: The <Tabs> has a defaultValue "${e}" but none of its children has the corresponding value. Available values are: ${n.map(e=>e.value).join(", ")}. If you intend to show no default tab, use defaultValue={null} instead.`);return e}const r=n.find(e=>e.default)??n[0];if(!r)throw new Error("Unexpected error: 0 tabValues");return r.value}({defaultValue:n,tabValues:o})),[c,d]=x({queryString:r,groupId:t}),[u,j]=function({groupId:e}){const n=function(e){return e?`docusaurus.tab.${e}`:null}(e),[r,t]=(0,h.Dv)(n);return[r,(0,s.useCallback)(e=>{n&&t.set(e)},[n,t])]}({groupId:t}),f=(()=>{const e=c??u;return m({value:e,tabValues:o})?e:null})();(0,a.A)(()=>{f&&l(f)},[f]);return{selectedValue:i,selectValue:(0,s.useCallback)(e=>{if(!m({value:e,tabValues:o}))throw new Error(`Can't select invalid tab value=${e}`);l(e),d(e),j(e)},[d,j,o]),tabValues:o}}var f=r(2303);const g={tabList:"tabList__CuJ",tabItem:"tabItem_LNqP"};var y=r(4848);function v({className:e,block:n,selectedValue:r,selectValue:s,tabValues:o}){const l=[],{blockElementScrollPositionUntilNextRender:a}=(0,i.a_)(),c=e=>{const n=e.currentTarget,t=l.indexOf(n),i=o[t].value;i!==r&&(a(n),s(i))},d=e=>{let n=null;switch(e.key){case"Enter":c(e);break;case"ArrowRight":{const r=l.indexOf(e.currentTarget)+1;n=l[r]??l[0];break}case"ArrowLeft":{const r=l.indexOf(e.currentTarget)-1;n=l[r]??l[l.length-1];break}}n?.focus()};return(0,y.jsx)("ul",{role:"tablist","aria-orientation":"horizontal",className:(0,t.A)("tabs",{"tabs--block":n},e),children:o.map(({value:e,label:n,attributes:s})=>(0,y.jsx)("li",{role:"tab",tabIndex:r===e?0:-1,"aria-selected":r===e,ref:e=>{l.push(e)},onKeyDown:d,onClick:c,...s,className:(0,t.A)("tabs__item",g.tabItem,s?.className,{"tabs__item--active":r===e}),children:n??e},e))})}function b({lazy:e,children:n,selectedValue:r}){const o=(Array.isArray(n)?n:[n]).filter(Boolean);if(e){const e=o.find(e=>e.props.value===r);return e?(0,s.cloneElement)(e,{className:(0,t.A)("margin-top--md",e.props.className)}):null}return(0,y.jsx)("div",{className:"margin-top--md",children:o.map((e,n)=>(0,s.cloneElement)(e,{key:n,hidden:e.props.value!==r}))})}function A(e){const n=j(e);return(0,y.jsxs)("div",{className:(0,t.A)(o.G.tabs.container,"tabs-container",g.tabList),children:[(0,y.jsx)(v,{...n,...e}),(0,y.jsx)(b,{...n,...e})]})}function w(e){const n=(0,f.A)();return(0,y.jsx)(A,{...e,children:u(e.children)},String(n))}},3059:(e,n,r)=>{r.d(n,{Ay:()=>l,RM:()=>o});var s=r(4848),t=r(8453);const o=[];function i(e){const 
n={a:"a",code:"code",li:"li",p:"p",strong:"strong",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"OpenRAG isn't guaranteed to be compatible with all models that are available through Ollama.\nFor example, some models might produce unexpected results, such as JSON-formatted output instead of natural language responses, and some models aren't appropriate for the types of tasks that OpenRAG performs, such as those that generate media."}),"\n",(0,s.jsx)(n.p,{children:"The OpenRAG team recommends the following models when using Ollama as your model provider:"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Language models"}),": ",(0,s.jsx)(n.code,{children:"gpt-oss:20b"})," or ",(0,s.jsx)(n.code,{children:"mistral-nemo:12b"}),"."]}),"\n",(0,s.jsxs)(n.p,{children:["If you choose ",(0,s.jsx)(n.code,{children:"gpt-oss:20b"}),", consider using Ollama Cloud or running Ollama on a remote machine because this model requires at least 16GB of RAM."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Embedding models"}),": ",(0,s.jsx)(n.a,{href:"https://ollama.com/library/nomic-embed-text",children:(0,s.jsx)(n.code,{children:"nomic-embed-text:latest"})}),", ",(0,s.jsx)(n.code,{children:"mxbai-embed-large:latest"}),", or ",(0,s.jsx)(n.code,{children:"embeddinggemma:latest"}),"."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.p,{children:["You can experiment with other models, but if you encounter issues that you are unable to resolve through other RAG best practices (like context filters and prompt engineering), try switching to one of the recommended models.\nYou can submit an ",(0,s.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/issues",children:"OpenRAG GitHub issue"})," to request support for specific models."]})]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},3351:(e,n,r)=>{r.d(n,{Ay:()=>a,RM:()=>i});var s=r(4848),t=r(8453),o=r(3059);const i=[...o.RM];function l(e){const n={a:"a",admonition:"admonition",li:"li",p:"p",strong:"strong",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Gather the credentials and connection details for your preferred model providers.\nYou must have access to at least one language model and one embedding model.\nIf a provider offers both types, you can use the same provider for both models.\nIf a provider offers only one type, you must select two providers."}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"OpenAI"}),": Create an ",(0,s.jsx)(n.a,{href:"https://platform.openai.com/api-keys",children:"OpenAI API key"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Anthropic"}),": Create an ",(0,s.jsx)(n.a,{href:"https://www.anthropic.com/docs/api/reference",children:"Anthropic API key"}),".\nAnthropic provides language models only; you must select an additional provider for embeddings."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"IBM watsonx.ai"}),": Get your watsonx.ai API endpoint, IBM project ID, and IBM API key from your watsonx 
deployment."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:[(0,s.jsx)(n.strong,{children:"Ollama"}),": Deploy an ",(0,s.jsx)(n.a,{href:"https://docs.ollama.com/",children:"Ollama instance and models"})," locally, in the cloud, or on a remote server. Then, get your Ollama server's base URL and the names of the models that you want to use."]}),"\n",(0,s.jsx)(n.admonition,{type:"info",children:(0,s.jsx)(o.Ay,{})}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Optional: Install GPU support with an NVIDIA GPU, ",(0,s.jsx)(n.a,{href:"https://docs.nvidia.com/cuda/",children:"CUDA"})," support, and compatible NVIDIA drivers on the OpenRAG host machine.\nIf you don't have GPU capabilities, OpenRAG provides an alternate CPU-only deployment that is suitable for most use cases.\nThe default CPU-only deployment doesn't prevent you from using GPU acceleration in external services, such as Ollama servers."]}),"\n"]}),"\n"]})}function a(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(l,{...e})}):l(e)}},4042:(e,n,r)=>{r.d(n,{Ay:()=>l,RM:()=>o});var s=r(4848),t=r(8453);const o=[];function i(e){const n={a:"a",li:"li",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["Install ",(0,s.jsx)(n.a,{href:"https://www.python.org/downloads/release/python-3100/",children:"Python"})," version 3.13 or later."]}),"\n"]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},5788:(e,n,r)=>{r.r(n),r.d(n,{assets:()=>x,contentTitle:()=>m,default:()=>g,frontMatter:()=>p,metadata:()=>s,toc:()=>j});const s=JSON.parse('{"id":"get-started/docker","title":"Deploy OpenRAG with self-managed services","description":"To manage your own OpenRAG services, deploy OpenRAG with Docker or Podman.","source":"@site/docs/get-started/docker.mdx","sourceDirName":"get-started","slug":"/docker","permalink":"/docker","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/get-started/docker.mdx","tags":[],"version":"current","frontMatter":{"title":"Deploy OpenRAG with self-managed services","slug":"/docker"},"sidebar":"tutorialSidebar","previous":{"title":"Install OpenRAG on Windows","permalink":"/install-windows"},"next":{"title":"Upgrade OpenRAG","permalink":"/upgrade"}}');var t=r(4848),o=r(8453),i=(r(1470),r(9365),r(887)),l=r(3351),a=r(309),c=r(6149),d=r(4042),h=r(927),u=(r(3059),r(5818));const p={title:"Deploy OpenRAG with self-managed services",slug:"/docker"},m=void 0,x={},j=[{value:"Prerequisites",id:"prerequisites",level:2},...c.RM,...d.RM,...a.RM,...l.RM,{value:"Prepare your deployment",id:"setup",level:2},{value:"Start services",id:"start-services",level:2},...u.RM,...i.RM,...h.RM];function f(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",table:"table",tbody:"tbody",td:"td",th:"th",thead:"thead",tr:"tr",ul:"ul",...(0,o.R)(),...e.components};return(0,t.jsxs)(t.Fragment,{children:[(0,t.jsx)(n.p,{children:"To manage your own OpenRAG services, deploy OpenRAG with Docker or Podman."}),"\n",(0,t.jsxs)(n.p,{children:["Use this installation method if you don't want to ",(0,t.jsx)(n.a,{href:"/tui",children:"use the Terminal User Interface (TUI)"}),", or you need to run OpenRAG in an environment where using the TUI is 
unfeasible."]}),"\n",(0,t.jsx)(n.h2,{id:"prerequisites",children:"Prerequisites"}),"\n",(0,t.jsx)(c.Ay,{}),"\n",(0,t.jsx)(d.Ay,{}),"\n",(0,t.jsx)(a.Ay,{}),"\n",(0,t.jsx)(l.Ay,{}),"\n",(0,t.jsx)(n.h2,{id:"setup",children:"Prepare your deployment"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Clone the OpenRAG repository:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"git clone https://github.com/langflow-ai/openrag.git\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Change to the root of the cloned repository:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"cd openrag\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Install dependencies:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"uv sync\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Create a ",(0,t.jsx)(n.code,{children:".env"})," file at the root of the cloned repository."]}),"\n",(0,t.jsxs)(n.p,{children:["You can create an empty file or copy the repository's ",(0,t.jsx)(n.a,{href:"https://github.com/langflow-ai/openrag/blob/main/.env.example",children:(0,t.jsx)(n.code,{children:".env.example"})})," file.\nThe example file contains some of the ",(0,t.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG environment variables"})," to get you started with configuring your deployment."]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"cp .env.example .env\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Edit the ",(0,t.jsx)(n.code,{children:".env"})," file to configure your deployment using ",(0,t.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG environment variables"}),".\nThe OpenRAG Docker Compose files pull values from your ",(0,t.jsx)(n.code,{children:".env"})," file to configure the OpenRAG containers.\nThe following variables are required or recommended:"]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.strong,{children:[(0,t.jsx)(n.code,{children:"OPENSEARCH_PASSWORD"})," (Required)"]}),": Sets the OpenSearch administrator password. It must adhere to the ",(0,t.jsx)(n.a,{href:"https://docs.opensearch.org/latest/security/configuration/demo-configuration/#setting-up-a-custom-admin-password",children:"OpenSearch password complexity requirements"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:(0,t.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})}),": The username for the Langflow administrator user. If ",(0,t.jsx)(n.code,{children:"LANGFLOW_SUPERUSER"})," isn't set, then the default value is ",(0,t.jsx)(n.code,{children:"admin"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.strong,{children:[(0,t.jsx)(n.code,{children:"LANGFLOW_SUPERUSER_PASSWORD"})," (Strongly recommended)"]}),": Sets the Langflow administrator password, and determines the Langflow server's default authentication mode. If ",(0,t.jsx)(n.code,{children:"LANGFLOW_SUPERUSER_PASSWORD"})," isn't set, then the Langflow server starts without authentication enabled. 
For more information, see ",(0,t.jsx)(n.a,{href:"/reference/configuration#langflow-settings",children:"Langflow settings"}),"."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsxs)(n.strong,{children:[(0,t.jsx)(n.code,{children:"LANGFLOW_SECRET_KEY"})," (Strongly recommended)"]}),": A secret encryption key for internal Langflow operations. It is recommended to ",(0,t.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key",children:"generate your own Langflow secret key"}),". If ",(0,t.jsx)(n.code,{children:"LANGFLOW_SECRET_KEY"})," isn't set, then Langflow generates a secret key automatically."]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"Model provider credentials"}),": Provide credentials for your preferred model providers. If none of these are set in the ",(0,t.jsx)(n.code,{children:".env"})," file, you must configure at least one provider during the ",(0,t.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding process"}),"."]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.code,{children:"OPENAI_API_KEY"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.code,{children:"ANTHROPIC_API_KEY"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.code,{children:"OLLAMA_ENDPOINT"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.code,{children:"WATSONX_API_KEY"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.code,{children:"WATSONX_ENDPOINT"})}),"\n",(0,t.jsx)(n.li,{children:(0,t.jsx)(n.code,{children:"WATSONX_PROJECT_ID"})}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"OAuth provider credentials"}),": To upload documents from external storage, such as Google Drive, set the required OAuth credentials for the connectors that you want to use. You can ",(0,t.jsx)(n.a,{href:"/ingestion#oauth-ingestion",children:"manage OAuth credentials"})," later, but it is recommended to configure them during initial set up so you don't have to rebuild the containers."]}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Google"}),": Provide your Google OAuth Client ID and Google OAuth Client Secret. You can generate these in the ",(0,t.jsx)(n.a,{href:"https://console.cloud.google.com/apis/credentials",children:"Google Cloud Console"}),". For more information, see the ",(0,t.jsx)(n.a,{href:"https://developers.google.com/identity/protocols/oauth2",children:"Google OAuth client documentation"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Microsoft"}),": For the Microsoft OAuth Client ID and Microsoft OAuth Client Secret, provide ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/app-registration?view=odsp-graph-online",children:"Azure application registration credentials for SharePoint and OneDrive"}),". For more information, see the ",(0,t.jsx)(n.a,{href:"https://learn.microsoft.com/en-us/onedrive/developer/rest-api/getting-started/graph-oauth",children:"Microsoft Graph OAuth client documentation"}),"."]}),"\n",(0,t.jsxs)(n.li,{children:[(0,t.jsx)(n.strong,{children:"Amazon"}),": Provide your AWS Access Key ID and AWS Secret Access Key with access to your S3 instance. 
For more information, see the AWS documentation on ",(0,t.jsx)(n.a,{href:"https://docs.aws.amazon.com/singlesignon/latest/userguide/manage-your-applications.html",children:"Configuring access to AWS applications"}),"."]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,t.jsxs)(n.p,{children:["For more information and variables, see ",(0,t.jsx)(n.a,{href:"/reference/configuration",children:"OpenRAG environment variables"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.h2,{id:"start-services",children:"Start services"}),"\n",(0,t.jsxs)(n.ol,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["To use the default Docling Serve implementation, start ",(0,t.jsx)(n.code,{children:"docling serve"})," on port 5001 on the host machine using the included script:"]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py start --port 5001\n"})}),"\n",(0,t.jsxs)(n.p,{children:["Docling cannot run inside a Docker container due to system-level dependencies, so you must manage it as a separate service on the host machine.\nFor more information, see ",(0,t.jsx)(n.a,{href:"/manage-services#start-native-services",children:"Stop, start, and inspect native services"}),"."]}),"\n",(0,t.jsxs)(n.p,{children:["Port 5001 is required to deploy OpenRAG successfully; don't use a different port.\nAdditionally, this enables the ",(0,t.jsx)(n.a,{href:"https://opensource.apple.com/projects/mlx/",children:"MLX framework"})," for accelerated performance on Apple Silicon Mac machines."]}),"\n",(0,t.jsx)(n.admonition,{type:"tip",children:(0,t.jsxs)(n.p,{children:["If you don't want to use the default Docling Serve implementation, see ",(0,t.jsx)(n.a,{href:"/knowledge#select-a-docling-implementation",children:"Select a Docling implementation"}),"."]})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Confirm ",(0,t.jsx)(n.code,{children:"docling serve"})," is running."]}),"\n",(0,t.jsx)(n.p,{children:"The following command checks the status of the default Docling Serve implementation:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"uv run python scripts/docling_ctl.py status\n"})}),"\n",(0,t.jsxs)(n.p,{children:["If ",(0,t.jsx)(n.code,{children:"docling serve"})," is running, the output includes the status, address, and process ID (PID):"]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",children:"Status: running\nEndpoint: http://127.0.0.1:5001\nDocs: http://127.0.0.1:5001/docs\nPID: 27746\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Deploy the OpenRAG containers locally using the appropriate Docker Compose configuration for your environment:"}),"\n",(0,t.jsxs)(n.ul,{children:["\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"CPU-only deployment"})," (default, recommended): If your host machine doesn't have NVIDIA GPU support, use the base ",(0,t.jsx)(n.code,{children:"docker-compose.yml"})," file:"]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",metastring:'title="Docker"',children:"docker compose up -d\n"})}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",metastring:'title="Podman"',children:"podman compose up -d\n"})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:[(0,t.jsx)(n.strong,{children:"GPU-accelerated deployment"}),": If your host machine has an NVIDIA GPU with CUDA support and compatible NVIDIA drivers, 
use the base ",(0,t.jsx)(n.code,{children:"docker-compose.yml"})," file with the ",(0,t.jsx)(n.code,{children:"docker-compose.gpu.yml"})," override:"]}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",metastring:'title="Docker"',children:"docker compose -f docker-compose.yml -f docker-compose.gpu.yml up -d\n"})}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",metastring:'title="Podman"',children:"podman compose -f docker-compose.yml -f docker-compose.gpu.yml up -d\n"})}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(n.admonition,{type:"tip",children:(0,t.jsx)(u.Ay,{})}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsx)(n.p,{children:"Wait for the OpenRAG containers to start, and then confirm that all containers are running:"}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",metastring:'title="Docker"',children:"docker compose ps\n"})}),"\n",(0,t.jsx)(n.pre,{children:(0,t.jsx)(n.code,{className:"language-bash",metastring:'title="Podman"',children:"podman compose ps\n"})}),"\n",(0,t.jsx)(n.p,{children:"The OpenRAG Docker Compose files deploy the following containers:"}),"\n",(0,t.jsxs)(n.table,{children:[(0,t.jsx)(n.thead,{children:(0,t.jsxs)(n.tr,{children:[(0,t.jsx)(n.th,{children:"Container Name"}),(0,t.jsx)(n.th,{children:"Default address"}),(0,t.jsx)(n.th,{children:"Purpose"})]})}),(0,t.jsxs)(n.tbody,{children:[(0,t.jsxs)(n.tr,{children:[(0,t.jsx)(n.td,{children:"OpenRAG Backend"}),(0,t.jsx)(n.td,{children:(0,t.jsx)(n.a,{href:"http://localhost:8000",children:"http://localhost:8000"})}),(0,t.jsx)(n.td,{children:"FastAPI server and core functionality."})]}),(0,t.jsxs)(n.tr,{children:[(0,t.jsx)(n.td,{children:"OpenRAG Frontend"}),(0,t.jsx)(n.td,{children:(0,t.jsx)(n.a,{href:"http://localhost:3000",children:"http://localhost:3000"})}),(0,t.jsx)(n.td,{children:"React web interface for user interaction."})]}),(0,t.jsxs)(n.tr,{children:[(0,t.jsx)(n.td,{children:"Langflow"}),(0,t.jsx)(n.td,{children:(0,t.jsx)(n.a,{href:"http://localhost:7860",children:"http://localhost:7860"})}),(0,t.jsxs)(n.td,{children:[(0,t.jsx)(n.a,{href:"/agents",children:"AI workflow engine"}),"."]})]}),(0,t.jsxs)(n.tr,{children:[(0,t.jsx)(n.td,{children:"OpenSearch"}),(0,t.jsx)(n.td,{children:(0,t.jsx)(n.a,{href:"http://localhost:9200",children:"http://localhost:9200"})}),(0,t.jsxs)(n.td,{children:["Datastore for ",(0,t.jsx)(n.a,{href:"/knowledge",children:"knowledge"}),"."]})]}),(0,t.jsxs)(n.tr,{children:[(0,t.jsx)(n.td,{children:"OpenSearch Dashboards"}),(0,t.jsx)(n.td,{children:(0,t.jsx)(n.a,{href:"http://localhost:5601",children:"http://localhost:5601"})}),(0,t.jsx)(n.td,{children:"OpenSearch database administration interface."})]})]})]}),"\n",(0,t.jsx)(n.p,{children:"When the containers are running, you can access your OpenRAG services at their addresses."}),"\n"]}),"\n",(0,t.jsxs)(n.li,{children:["\n",(0,t.jsxs)(n.p,{children:["Access the OpenRAG frontend at ",(0,t.jsx)(n.code,{children:"http://localhost:3000"}),", and then continue with the ",(0,t.jsx)(n.a,{href:"#application-onboarding",children:"application onboarding process"}),"."]}),"\n"]}),"\n"]}),"\n",(0,t.jsx)(i.Ay,{}),"\n",(0,t.jsx)(h.Ay,{})]})}function g(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,t.jsx)(n,{...e,children:(0,t.jsx)(f,{...e})}):f(e)}},5818:(e,n,r)=>{r.d(n,{Ay:()=>l,RM:()=>o});var s=r(4848),t=r(8453);const o=[];function i(e){const n={p:"p",...(0,t.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsx)(n.p,{children:"GPU acceleration 
isn't required for most use cases.\nOpenRAG's CPU-only deployment doesn't prevent you from using GPU acceleration in external services, such as Ollama servers."}),"\n",(0,s.jsx)(n.p,{children:"GPU acceleration is required only for specific use cases, typically involving customization of the ingestion flows or ingestion logic.\nFor example, writing alternate ingest logic in OpenRAG that uses GPUs directly in the container, or customizing the ingestion flows to use Langflow's Docling component with GPU acceleration instead of OpenRAG's Docling Serve service."})]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},6149:(e,n,r)=>{r.d(n,{Ay:()=>l,RM:()=>o});var s=r(4848),t=r(8453);const o=[];function i(e){const n={a:"a",li:"li",ul:"ul",...(0,t.R)(),...e.components};return(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:["For Microsoft Windows, you must use the Windows Subsystem for Linux (WSL).\nSee ",(0,s.jsx)(n.a,{href:"/install-windows",children:"Install OpenRAG on Windows"})," before proceeding."]}),"\n"]})}function l(e={}){const{wrapper:n}={...(0,t.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},9179:(e,n,r)=>{r.d(n,{A:()=>o});r(6540);var s=r(7856),t=r(4848);function o({name:e,...n}){const r=s[e];return r?(0,t.jsx)(r,{...n}):null}},9365:(e,n,r)=>{r.d(n,{A:()=>i});r(6540);var s=r(4164);const t={tabItem:"tabItem_Ymn6"};var o=r(4848);function i({children:e,hidden:n,className:r}){return(0,o.jsx)("div",{role:"tabpanel",className:(0,s.A)(t.tabItem,r),hidden:n,children:e})}}}]);