openrag/assets/js/27b4a875.a11e6996.js
2025-12-16 14:05:59 +00:00

1 line
No EOL
27 KiB
JavaScript

"use strict";(globalThis.webpackChunkopenrag_docs=globalThis.webpackChunkopenrag_docs||[]).push([[3207],{1470:(e,n,t)=>{t.d(n,{A:()=>A});var s=t(6540),o=t(4164),r=t(7559),a=t(3104),i=t(6347),l=t(205),c=t(7485),h=t(1682),d=t(679);function u(e){return s.Children.toArray(e).filter(e=>"\n"!==e).map(e=>{if(!e||(0,s.isValidElement)(e)&&function(e){const{props:n}=e;return!!n&&"object"==typeof n&&"value"in n}(e))return e;throw new Error(`Docusaurus error: Bad <Tabs> child <${"string"==typeof e.type?e.type:e.type.name}>: all children of the <Tabs> component should be <TabItem>, and every <TabItem> should have a unique "value" prop.`)})?.filter(Boolean)??[]}function p(e){const{values:n,children:t}=e;return(0,s.useMemo)(()=>{const e=n??function(e){return u(e).map(({props:{value:e,label:n,attributes:t,default:s}})=>({value:e,label:n,attributes:t,default:s}))}(t);return function(e){const n=(0,h.XI)(e,(e,n)=>e.value===n.value);if(n.length>0)throw new Error(`Docusaurus error: Duplicate values "${n.map(e=>e.value).join(", ")}" found in <Tabs>. Every value needs to be unique.`)}(e),e},[n,t])}function g({value:e,tabValues:n}){return n.some(n=>n.value===e)}function f({queryString:e=!1,groupId:n}){const t=(0,i.W6)(),o=function({queryString:e=!1,groupId:n}){if("string"==typeof e)return e;if(!1===e)return null;if(!0===e&&!n)throw new Error('Docusaurus error: The <Tabs> component groupId prop is required if queryString=true, because this value is used as the search param name. You can also provide an explicit value such as queryString="my-search-param".');return n??null}({queryString:e,groupId:n});return[(0,c.aZ)(o),(0,s.useCallback)(e=>{if(!o)return;const n=new URLSearchParams(t.location.search);n.set(o,e),t.replace({...t.location,search:n.toString()})},[o,t])]}function m(e){const{defaultValue:n,queryString:t=!1,groupId:o}=e,r=p(e),[a,i]=(0,s.useState)(()=>function({defaultValue:e,tabValues:n}){if(0===n.length)throw new Error("Docusaurus error: the <Tabs> component requires at least one <TabItem> children component");if(e){if(!g({value:e,tabValues:n}))throw new Error(`Docusaurus error: The <Tabs> has a defaultValue "${e}" but none of its children has the corresponding value. Available values are: ${n.map(e=>e.value).join(", ")}. If you intend to show no default tab, use defaultValue={null} instead.`);return e}const t=n.find(e=>e.default)??n[0];if(!t)throw new Error("Unexpected error: 0 tabValues");return t.value}({defaultValue:n,tabValues:r})),[c,h]=f({queryString:t,groupId:o}),[u,m]=function({groupId:e}){const n=function(e){return e?`docusaurus.tab.${e}`:null}(e),[t,o]=(0,d.Dv)(n);return[t,(0,s.useCallback)(e=>{n&&o.set(e)},[n,o])]}({groupId:o}),x=(()=>{const e=c??u;return g({value:e,tabValues:r})?e:null})();(0,l.A)(()=>{x&&i(x)},[x]);return{selectedValue:a,selectValue:(0,s.useCallback)(e=>{if(!g({value:e,tabValues:r}))throw new Error(`Can't select invalid tab value=${e}`);i(e),h(e),m(e)},[h,m,r]),tabValues:r}}var x=t(2303);const j={tabList:"tabList__CuJ",tabItem:"tabItem_LNqP"};var y=t(4848);function w({className:e,block:n,selectedValue:t,selectValue:s,tabValues:r}){const i=[],{blockElementScrollPositionUntilNextRender:l}=(0,a.a_)(),c=e=>{const n=e.currentTarget,o=i.indexOf(n),a=r[o].value;a!==t&&(l(n),s(a))},h=e=>{let n=null;switch(e.key){case"Enter":c(e);break;case"ArrowRight":{const t=i.indexOf(e.currentTarget)+1;n=i[t]??i[0];break}case"ArrowLeft":{const t=i.indexOf(e.currentTarget)-1;n=i[t]??i[i.length-1];break}}n?.focus()};return(0,y.jsx)("ul",{role:"tablist","aria-orientation":"horizontal",className:(0,o.A)("tabs",{"tabs--block":n},e),children:r.map(({value:e,label:n,attributes:s})=>(0,y.jsx)("li",{role:"tab",tabIndex:t===e?0:-1,"aria-selected":t===e,ref:e=>{i.push(e)},onKeyDown:h,onClick:c,...s,className:(0,o.A)("tabs__item",j.tabItem,s?.className,{"tabs__item--active":t===e}),children:n??e},e))})}function b({lazy:e,children:n,selectedValue:t}){const r=(Array.isArray(n)?n:[n]).filter(Boolean);if(e){const e=r.find(e=>e.props.value===t);return e?(0,s.cloneElement)(e,{className:(0,o.A)("margin-top--md",e.props.className)}):null}return(0,y.jsx)("div",{className:"margin-top--md",children:r.map((e,n)=>(0,s.cloneElement)(e,{key:n,hidden:e.props.value!==t}))})}function v(e){const n=m(e);return(0,y.jsxs)("div",{className:(0,o.A)(r.G.tabs.container,"tabs-container",j.tabList),children:[(0,y.jsx)(w,{...n,...e}),(0,y.jsx)(b,{...n,...e})]})}function A(e){const n=(0,x.A)();return(0,y.jsx)(v,{...e,children:u(e.children)},String(n))}},2361:(e,n,t)=>{t.r(n),t.d(n,{assets:()=>d,contentTitle:()=>h,default:()=>g,frontMatter:()=>c,metadata:()=>s,toc:()=>u});const s=JSON.parse('{"id":"core-components/chat","title":"Chat in OpenRAG","description":"After you upload documents to your knowledge base, you can use the OpenRAG Chat feature to interact with your knowledge through natural language queries.","source":"@site/docs/core-components/chat.mdx","sourceDirName":"core-components","slug":"/chat","permalink":"/chat","draft":false,"unlisted":false,"editUrl":"https://github.com/openrag/openrag/tree/main/docs/docs/core-components/chat.mdx","tags":[],"version":"current","frontMatter":{"title":"Chat in OpenRAG","slug":"/chat"},"sidebar":"tutorialSidebar","previous":{"title":"Filter knowledge","permalink":"/knowledge-filters"},"next":{"title":"Environment variables","permalink":"/reference/configuration"}}');var o=t(4848),r=t(8453),a=t(9179),i=(t(1470),t(9365),t(7224)),l=t(8401);const c={title:"Chat in OpenRAG",slug:"/chat"},h=void 0,d={},u=[{value:"OpenRAG OpenSearch Agent flow",id:"flow",level:2},{value:"Nudges",id:"nudges",level:2},{value:"Upload documents to the chat",id:"upload-documents-to-the-chat",level:2},...l.RM,{value:"Inspect tool calls and knowledge",id:"inspect-tool-calls-and-knowledge",level:2},{value:"Integrate OpenRAG chat into an application",id:"integrate-openrag-chat-into-an-application",level:2},...i.RM];function p(e){const n={a:"a",admonition:"admonition",code:"code",h2:"h2",img:"img",li:"li",p:"p",strong:"strong",ul:"ul",...(0,r.R)(),...e.components},{Details:s}=n;return s||function(e,n){throw new Error("Expected "+(n?"component":"object")+" `"+e+"` to be defined: you likely forgot to import, pass, or provide it.")}("Details",!0),(0,o.jsxs)(o.Fragment,{children:[(0,o.jsxs)(n.p,{children:["After you ",(0,o.jsx)(n.a,{href:"/ingestion",children:"upload documents to your knowledge base"}),", you can use the OpenRAG ",(0,o.jsx)(a.A,{name:"MessageSquare","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Chat"})," feature to interact with your knowledge through natural language queries."]}),"\n",(0,o.jsxs)(n.p,{children:["The OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"})," uses an LLM-powered agent to understand your queries, retrieve relevant information from your knowledge base, and generate context-aware responses.\nThe agent can also fetch information from URLs and new documents that you provide during the chat session.\nTo limit the knowledge available to the agent, use ",(0,o.jsx)(n.a,{href:"/knowledge-filters",children:"filters"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["The agent can call specialized Model Context Protocol (MCP) tools to extend its capabilities.\nTo add or change the available tools, you must edit the ",(0,o.jsxs)(n.a,{href:"#flow",children:[(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow"]}),"."]}),"\n",(0,o.jsx)(n.admonition,{type:"tip",children:(0,o.jsxs)(n.p,{children:["Try chatting, uploading documents, and modifying chat settings in the ",(0,o.jsx)(n.a,{href:"/quickstart",children:"quickstart"}),"."]})}),"\n",(0,o.jsx)(n.h2,{id:"flow",children:"OpenRAG OpenSearch Agent flow"}),"\n",(0,o.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow runs in the background to retrieve relevant information from your knowledge base and generate a response."]}),"\n",(0,o.jsxs)(n.p,{children:["If you ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", you'll see that it is comprised of eight components that work together to ingest chat messages, retrieve relevant information from your knowledge base, and then generate responses.\nWhen you inspect this flow, you can edit the components to customize the agent's behavior."]}),"\n",(0,o.jsx)(n.p,{children:(0,o.jsx)(n.img,{alt:"OpenRAG Open Search Agent Flow",src:t(3982).A+"",width:"4084",height:"2176"})}),"\n",(0,o.jsxs)(n.ul,{children:["\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Chat Input"})," component"]}),": This component starts the flow when it receives a chat message. It is connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Input"})," port.\nWhen you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", your chat messages are passed to the ",(0,o.jsx)(n.strong,{children:"Chat Input"})," component, which then sends them to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component for processing."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/agents",children:[(0,o.jsx)(n.strong,{children:"Agent"})," component"]}),": This component orchestrates the entire flow by processing chat messages, searching the knowledge base, and organizing the retrieved information into a cohesive response.\nThe agent's general behavior is defined by the prompt in the ",(0,o.jsx)(n.strong,{children:"Agent Instructions"})," field and the model connected to the ",(0,o.jsx)(n.strong,{children:"Language Model"})," port.\nOne or more specialized tools can be attached to the ",(0,o.jsx)(n.strong,{children:"Tools"})," port to extend the agent's capabilities. In this case, there are two tools: ",(0,o.jsx)(n.strong,{children:"MCP Tools"})," and ",(0,o.jsx)(n.strong,{children:"OpenSearch"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["The ",(0,o.jsx)(n.strong,{children:"Agent"})," component is the star of this flow because it powers decision making, tool calling, and an LLM-driven conversational experience."]}),"\n",(0,o.jsxs)(s,{children:[(0,o.jsx)("summary",{children:"How do agents work?"}),(0,o.jsx)(n.p,{children:"Agents extend Large Language Models (LLMs) by integrating tools, which are functions that provide additional context and enable autonomous task execution. These integrations make agents more specialized and powerful than standalone LLMs."}),(0,o.jsx)(n.p,{children:"Whereas an LLM might generate acceptable, inert responses to general queries and tasks, an agent can leverage the integrated context and tools to provide more relevant responses and even take action. For example, you might create an agent that can access your company's documentation, repositories, and other resources to help your team with tasks that require knowledge of your specific products, customers, and code."}),(0,o.jsx)(n.p,{children:"Agents use LLMs as a reasoning engine to process input, determine which actions to take to address the query, and then generate a response. The response could be a typical text-based LLM response, or it could involve an action, like editing a file, running a script, or calling an external API."}),(0,o.jsx)(n.p,{children:"In an agentic context, tools are functions that the agent can run to perform tasks or access external resources. A function is wrapped as a Tool object with a common interface that the agent understands. Agents become aware of tools through tool registration, which is when the agent is provided a list of available tools typically at agent initialization. The Tool object's description tells the agent what the tool can do so that it can decide whether the tool is appropriate for a given request."})]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-models",children:[(0,o.jsx)(n.strong,{children:"Language Model"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Language Model"})," port, this component provides the base language model driver for the agent. The agent cannot function without a model because the model is used for general knowledge, reasoning, and generating responses."]}),"\n",(0,o.jsx)(n.p,{children:"Different models can change the style and content of the agent's responses, and some models might be better suited for certain tasks than others. If the agent doesn't seem to be handling requests well, try changing the model to see how the responses change. For example, fast models might be good for simple queries, but they might not have the depth of reasoning for complex, multi-faceted queries."}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/mcp-client",children:[(0,o.jsx)(n.strong,{children:"MCP Tools"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Tools"})," port, this component can be used to ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/mcp-server",children:"access any MCP server"})," and the MCP tools provided by that server. In this case, your OpenRAG Langflow instance's ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/concepts-flows#projects",children:(0,o.jsx)(n.strong,{children:"Starter Project"})})," is the MCP server, and the ",(0,o.jsxs)(n.a,{href:"/ingestion#url-flow",children:[(0,o.jsx)(n.strong,{children:"OpenSearch URL Ingestion"})," flow"]})," is the MCP tool.\nThis flow fetches content from URLs, and then stores the content in your OpenRAG OpenSearch knowledge base. By serving this flow as an MCP tool, the agent can selectively call this tool if a URL is detected in the chat input."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/bundles-elastic#opensearch",children:[(0,o.jsx)(n.strong,{children:"OpenSearch"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Tools"})," port, this component lets the agent search your ",(0,o.jsx)(n.a,{href:"/knowledge",children:"OpenRAG OpenSearch knowledge base"}),". The agent might not use this database for every request; the agent uses this connection only if it decides that documents in your knowledge base are relevant to your query."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-embedding-models",children:[(0,o.jsx)(n.strong,{children:"Embedding Model"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component's ",(0,o.jsx)(n.strong,{children:"Embedding"})," port, this component generates embeddings from chat input that are used in ",(0,o.jsx)(n.a,{href:"https://www.ibm.com/think/topics/vector-search",children:"similarity search"})," to find content in your knowledge base that is relevant to the chat input. The agent uses this information to generate context-aware responses that are specialized for your data."]}),"\n",(0,o.jsxs)(n.p,{children:["It is critical that the embedding model used here matches the embedding model used when you ",(0,o.jsx)(n.a,{href:"/ingestion",children:"upload documents to your knowledge base"}),". Mismatched models and dimensions can degrade the quality of similarity search results causing the agent to retrieve irrelevant documents from your knowledge base."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Text Input"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component's ",(0,o.jsx)(n.strong,{children:"Search Filters"})," port, this component is populated with a Langflow global variable named ",(0,o.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"}),". If a global or chat-level ",(0,o.jsx)(n.a,{href:"/knowledge-filters",children:"knowledge filter"})," is set, then the variable contains the filter expression, which limits the documents that the agent can access in the knowledge base.\nIf no knowledge filter is set, then the ",(0,o.jsx)(n.code,{children:"OPENRAG-QUERY-FILTER"})," variable is empty, and the agent can access all documents in the knowledge base."]}),"\n"]}),"\n",(0,o.jsxs)(n.li,{children:["\n",(0,o.jsxs)(n.p,{children:[(0,o.jsxs)(n.a,{href:"https://docs.langflow.org/components-io",children:[(0,o.jsx)(n.strong,{children:"Chat Output"})," component"]}),": Connected to the ",(0,o.jsx)(n.strong,{children:"Agent"})," component's ",(0,o.jsx)(n.strong,{children:"Output"})," port, this component returns the agent's generated response as a chat message."]}),"\n"]}),"\n"]}),"\n",(0,o.jsx)(n.h2,{id:"nudges",children:"Nudges"}),"\n",(0,o.jsxs)(n.p,{children:["When you use the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"}),", the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Nudges"})," flow runs in the background to pull additional context from your knowledge base and chat history."]}),"\n",(0,o.jsxs)(n.p,{children:["Nudges appear as prompts in the chat.\nClick a nudge to accept it and provide the nudge's context to the OpenRAG ",(0,o.jsx)(n.strong,{children:"Chat"})," agent (the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow)."]}),"\n",(0,o.jsxs)(n.p,{children:["Like OpenRAG's other built-in flows, you can ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"inspect the flow in Langflow"}),", and you can customize it if you want to change the nudge behavior."]}),"\n",(0,o.jsx)(n.h2,{id:"upload-documents-to-the-chat",children:"Upload documents to the chat"}),"\n",(0,o.jsx)(l.Ay,{}),"\n",(0,o.jsx)(n.h2,{id:"inspect-tool-calls-and-knowledge",children:"Inspect tool calls and knowledge"}),"\n",(0,o.jsxs)(n.p,{children:["During the chat, you'll see information about the agent's process. For more detail, you can inspect individual tool calls. This is helpful for troubleshooting because it shows you how the agent used particular tools. For example, click ",(0,o.jsx)(a.A,{name:"Gear","aria-hidden":"true"})," ",(0,o.jsx)(n.strong,{children:"Function Call: search_documents (tool_call)"})," to view the log of tool calls made by the agent to the ",(0,o.jsx)(n.strong,{children:"OpenSearch"})," component."]}),"\n",(0,o.jsxs)(n.p,{children:["If documents in your knowledge base seem to be missing or interpreted incorrectly, see ",(0,o.jsx)(n.a,{href:"/ingestion#troubleshoot-ingestion",children:"Troubleshoot ingestion"}),"."]}),"\n",(0,o.jsxs)(n.p,{children:["If tool calls and knowledge appear normal, but the agent's responses seem off-topic or incorrect, consider changing the agent's language model or prompt, as explained in ",(0,o.jsx)(n.a,{href:"/agents#inspect-and-modify-flows",children:"Inspect and modify flows"}),"."]}),"\n",(0,o.jsx)(n.h2,{id:"integrate-openrag-chat-into-an-application",children:"Integrate OpenRAG chat into an application"}),"\n",(0,o.jsxs)(n.p,{children:["You can integrate OpenRAG flows into your applications using the ",(0,o.jsx)(n.a,{href:"https://docs.langflow.org/api-reference-api-examples",children:"Langflow API"}),".\nTo simplify this integration, you can get pre-configured code snippets directly from the embedded Langflow visual editor."]}),"\n",(0,o.jsxs)(n.p,{children:["The following example demonstrates how to generate and use code snippets for the ",(0,o.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow:"]}),"\n",(0,o.jsx)(i.Ay,{})]})}function g(e={}){const{wrapper:n}={...(0,r.R)(),...e.components};return n?(0,o.jsx)(n,{...e,children:(0,o.jsx)(p,{...e})}):p(e)}},3982:(e,n,t)=>{t.d(n,{A:()=>s});const s=t.p+"assets/images/opensearch-agent-flow-f3b279e02425cd043002eb7749067108.png"},7224:(e,n,t)=>{t.d(n,{Ay:()=>h,RM:()=>l});var s=t(4848),o=t(8453),r=t(9179),a=t(1470),i=t(9365);const l=[];function c(e){const n={a:"a",code:"code",li:"li",ol:"ol",p:"p",pre:"pre",strong:"strong",ul:"ul",...(0,o.R)(),...e.components};return(0,s.jsxs)(s.Fragment,{children:[(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Open the ",(0,s.jsx)(n.strong,{children:"OpenRAG OpenSearch Agent"})," flow in the Langflow visual editor: From the ",(0,s.jsx)(n.strong,{children:"Chat"})," window, click ",(0,s.jsx)(r.A,{name:"Settings2","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Settings"}),", click ",(0,s.jsx)(n.strong,{children:"Edit in Langflow"}),", and then click ",(0,s.jsx)(n.strong,{children:"Proceed"}),"."]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Optional: If you don't want to use the Langflow API key that is generated automatically when you install OpenRAG, you can create a ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/api-keys-and-authentication",children:"Langflow API key"}),".\nThis key doesn't grant access to OpenRAG; it is only for authenticating with the Langflow API."]}),"\n",(0,s.jsxs)(n.ol,{children:["\n",(0,s.jsxs)(n.li,{children:["In the Langflow visual editor, click your user icon in the header, and then select ",(0,s.jsx)(n.strong,{children:"Settings"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Click ",(0,s.jsx)(n.strong,{children:"Langflow API Keys"}),", and then click ",(0,s.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," ",(0,s.jsx)(n.strong,{children:"Add New"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:["Name your key, and then click ",(0,s.jsx)(n.strong,{children:"Create API Key"}),"."]}),"\n",(0,s.jsx)(n.li,{children:"Copy the API key and store it securely."}),"\n",(0,s.jsxs)(n.li,{children:["Exit the Langflow ",(0,s.jsx)(n.strong,{children:"Settings"})," page to return to the visual editor."]}),"\n"]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsxs)(n.p,{children:["Click ",(0,s.jsx)(n.strong,{children:"Share"}),", and then select ",(0,s.jsx)(n.strong,{children:"API access"})," to get pregenerated code snippets that call the Langflow API and run the flow."]}),"\n",(0,s.jsxs)(n.p,{children:["These code snippets construct API requests with your Langflow server URL (",(0,s.jsx)(n.code,{children:"LANGFLOW_SERVER_ADDRESS"}),"), the flow to run (",(0,s.jsx)(n.code,{children:"FLOW_ID"}),"), required headers (",(0,s.jsx)(n.code,{children:"LANGFLOW_API_KEY"}),", ",(0,s.jsx)(n.code,{children:"Content-Type"}),"), and a payload containing the required inputs to run the flow, including a default chat input message."]}),"\n",(0,s.jsx)(n.p,{children:"In production, you would modify the inputs to suit your application logic. For example, you could replace the default chat input message with dynamic user input."}),"\n",(0,s.jsxs)(a.A,{children:[(0,s.jsx)(i.A,{value:"python",label:"Python",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-python",children:'import requests\nimport os\nimport uuid\n\napi_key = \'LANGFLOW_API_KEY\'\nurl = "http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID" # The complete API endpoint URL for this flow\n\n# Request payload configuration\npayload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n}\npayload["session_id"] = str(uuid.uuid4())\n\nheaders = {"x-api-key": api_key}\n\ntry:\n # Send API request\n response = requests.request("POST", url, json=payload, headers=headers)\n response.raise_for_status() # Raise exception for bad status codes\n\n # Print response\n print(response.text)\n\nexcept requests.exceptions.RequestException as e:\n print(f"Error making API request: {e}")\nexcept ValueError as e:\n print(f"Error parsing response: {e}")\n'})})}),(0,s.jsx)(i.A,{value:"typescript",label:"TypeScript",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-typescript",children:'const crypto = require(\'crypto\');\nconst apiKey = \'LANGFLOW_API_KEY\';\nconst payload = {\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n};\npayload.session_id = crypto.randomUUID();\n\nconst options = {\n method: \'POST\',\n headers: {\n \'Content-Type\': \'application/json\',\n "x-api-key": apiKey\n },\n body: JSON.stringify(payload)\n};\n\nfetch(\'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID\', options)\n .then(response => response.json())\n .then(response => console.warn(response))\n .catch(err => console.error(err));\n'})})}),(0,s.jsx)(i.A,{value:"curl",label:"curl",children:(0,s.jsx)(n.pre,{children:(0,s.jsx)(n.code,{className:"language-bash",children:'curl --request POST \\\n --url \'http://LANGFLOW_SERVER_ADDRESS/api/v1/run/FLOW_ID?stream=false\' \\\n --header \'Content-Type: application/json\' \\\n --header "x-api-key: LANGFLOW_API_KEY" \\\n --data \'{\n "output_type": "chat",\n "input_type": "chat",\n "input_value": "hello world!"\n }\'\n'})})})]}),"\n"]}),"\n",(0,s.jsxs)(n.li,{children:["\n",(0,s.jsx)(n.p,{children:"Copy your preferred snippet, and then run it:"}),"\n",(0,s.jsxs)(n.ul,{children:["\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"Python"}),": Paste the snippet into a ",(0,s.jsx)(n.code,{children:".py"})," file, save it, and then run it with ",(0,s.jsx)(n.code,{children:"python filename.py"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"TypeScript"}),": Paste the snippet into a ",(0,s.jsx)(n.code,{children:".ts"})," file, save it, and then run it with ",(0,s.jsx)(n.code,{children:"ts-node filename.ts"}),"."]}),"\n",(0,s.jsxs)(n.li,{children:[(0,s.jsx)(n.strong,{children:"curl"}),": Paste and run snippet directly in your terminal."]}),"\n"]}),"\n"]}),"\n"]}),"\n",(0,s.jsx)(n.p,{children:"If the request is successful, the response includes many details about the flow run, including the session ID, inputs, outputs, components, durations, and more."}),"\n",(0,s.jsxs)(n.p,{children:["In production, you won't pass the raw response to the user in its entirety.\nInstead, you extract and reformat relevant fields for different use cases, as demonstrated in the ",(0,s.jsx)(n.a,{href:"https://docs.langflow.org/quickstart#extract-data-from-the-response",children:"Langflow quickstart"}),".\nFor example, you could pass the chat output text to a front-end user-facing application, and store specific fields in logs and backend data stores for monitoring, chat history, or analytics.\nYou could also pass the output from one flow as input to another flow."]})]})}function h(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(c,{...e})}):c(e)}},8401:(e,n,t)=>{t.d(n,{Ay:()=>l,RM:()=>a});var s=t(4848),o=t(8453),r=t(9179);const a=[];function i(e){const n={p:"p",strong:"strong",...(0,o.R)(),...e.components};return(0,s.jsxs)(n.p,{children:["When using the OpenRAG ",(0,s.jsx)(n.strong,{children:"Chat"}),", click ",(0,s.jsx)(r.A,{name:"Plus","aria-hidden":"true"})," in the chat input field to upload a file to the current chat session.\nFiles added this way are processed and made available to the agent for the current conversation only.\nThese files aren't stored in the knowledge base permanently."]})}function l(e={}){const{wrapper:n}={...(0,o.R)(),...e.components};return n?(0,s.jsx)(n,{...e,children:(0,s.jsx)(i,{...e})}):i(e)}},9179:(e,n,t)=>{t.d(n,{A:()=>r});t(6540);var s=t(7856),o=t(4848);function r({name:e,...n}){const t=s[e];return t?(0,o.jsx)(t,{...n}):null}},9365:(e,n,t)=>{t.d(n,{A:()=>a});t(6540);var s=t(4164);const o={tabItem:"tabItem_Ymn6"};var r=t(4848);function a({children:e,hidden:n,className:t}){return(0,r.jsx)("div",{role:"tabpanel",className:(0,s.A)(o.tabItem,t),hidden:n,children:e})}}}]);