diff --git a/.env.example b/.env.example
index 84787216..6bb49c99 100644
--- a/.env.example
+++ b/.env.example
@@ -1,16 +1,24 @@
 # make one like so https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key
 LANGFLOW_SECRET_KEY=
+
 # flow ids for chat and ingestion flows
 LANGFLOW_CHAT_FLOW_ID=1098eea1-6649-4e1d-aed1-b77249fb8dd0
 LANGFLOW_INGEST_FLOW_ID=5488df7c-b93f-4f87-a446-b67028bc0813
+NUDGES_FLOW_ID=ebc01d31-1976-46ce-a385-b0240327226c
+
+# Set a strong admin password for OpenSearch; a bcrypt hash is generated at
+# container startup from this value. Do not commit real secrets.
 # must match the hashed password in secureconfig, must change for secure deployment!!!
 OPENSEARCH_PASSWORD=
+
 # make here https://console.cloud.google.com/apis/credentials
 GOOGLE_OAUTH_CLIENT_ID=
 GOOGLE_OAUTH_CLIENT_SECRET=
+
 # Azure app registration credentials for SharePoint/OneDrive
 MICROSOFT_GRAPH_OAUTH_CLIENT_ID=
 MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=
+
 # OPTIONAL: a DNS name routable from Google etc. (something like ngrok works); this enables continuous ingestion
 WEBHOOK_BASE_URL=
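The note on `OPENSEARCH_PASSWORD` refers to the bcrypt hash that the OpenSearch container derives from the plaintext value at startup. For reference, a minimal sketch of that kind of derivation, assuming the Python `bcrypt` package — the actual entrypoint script is not part of this diff:

```python
# Illustrative only: shows how a bcrypt hash like the one the container
# generates from OPENSEARCH_PASSWORD can be produced and verified.
# The real startup script is not included in this diff.
import os

import bcrypt

password = os.environ["OPENSEARCH_PASSWORD"].encode("utf-8")

# gensalt() embeds a random salt and the cost factor in the hash itself,
# which is why the hashed password in the security config must be
# regenerated whenever the plaintext value changes.
hashed = bcrypt.hashpw(password, bcrypt.gensalt(rounds=12))
print(hashed.decode("utf-8"))

# Round-trip check, as the security plugin does on authentication.
assert bcrypt.checkpw(password, hashed)
```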
diff --git a/docker-compose-cpu.yml b/docker-compose-cpu.yml
index d22c2491..2e4e796e 100644
--- a/docker-compose-cpu.yml
+++ b/docker-compose-cpu.yml
@@ -1,8 +1,9 @@
 services:
   opensearch:
-    build:
-      context: .
-      dockerfile: Dockerfile
+    image: phact/openrag-opensearch:${OPENRAG_VERSION:-latest}
+    #build:
+    #  context: .
+    #  dockerfile: Dockerfile
     container_name: os
     depends_on:
       - openrag-backend
@@ -38,10 +39,10 @@
       - "5601:5601"

   openrag-backend:
-    image: phact/openrag-backend:latest
+    image: phact/openrag-backend:${OPENRAG_VERSION:-latest}
     #build:
-      #context: .
-      #dockerfile: Dockerfile.backend
+    #context: .
+    #dockerfile: Dockerfile.backend
     container_name: openrag-backend
     depends_on:
       - langflow
@@ -53,6 +54,7 @@
       - LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
       - LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
       - LANGFLOW_CHAT_FLOW_ID=${LANGFLOW_CHAT_FLOW_ID}
+      - NUDGES_FLOW_ID=${NUDGES_FLOW_ID}
       - OPENSEARCH_PORT=9200
       - OPENSEARCH_USERNAME=admin
       - OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
@@ -71,10 +73,10 @@
       - ./keys:/app/keys:Z

   openrag-frontend:
-    image: phact/openrag-frontend:latest
+    image: phact/openrag-frontend:${OPENRAG_VERSION:-latest}
     #build:
-      #context: .
-      #dockerfile: Dockerfile.frontend
+    #context: .
+    #dockerfile: Dockerfile.frontend
     container_name: openrag-frontend
     depends_on:
       - openrag-backend
@@ -86,7 +88,7 @@
   langflow:
     volumes:
       - ./flows:/app/flows:Z
-    image: phact/langflow:responses
+    image: phact/langflow:${LANGFLOW_VERSION:-responses}
     container_name: langflow
     ports:
       - "7860:7860"
diff --git a/docker-compose.yml b/docker-compose.yml
index 47781eb6..091775fe 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,9 +1,9 @@
 services:
   opensearch:
-    image: phact/openrag-opensearch:latest
+    image: phact/openrag-opensearch:${OPENRAG_VERSION:-latest}
     #build:
-      #context: .
-      #dockerfile: Dockerfile
+    #context: .
+    #dockerfile: Dockerfile
     container_name: os
     depends_on:
       - openrag-backend
@@ -39,10 +39,10 @@
       - "5601:5601"

   openrag-backend:
-    image: phact/openrag-backend:latest
+    image: phact/openrag-backend:${OPENRAG_VERSION:-latest}
     #build:
-      #context: .
-      #dockerfile: Dockerfile.backend
+    #context: .
+    #dockerfile: Dockerfile.backend
     container_name: openrag-backend
     depends_on:
       - langflow
@@ -53,6 +53,7 @@ services:
       - LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
       - LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
       - LANGFLOW_CHAT_FLOW_ID=${LANGFLOW_CHAT_FLOW_ID}
+      - NUDGES_FLOW_ID=${NUDGES_FLOW_ID}
       - OPENSEARCH_PORT=9200
       - OPENSEARCH_USERNAME=admin
       - OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
@@ -72,10 +73,10 @@ services:
     gpus: all

   openrag-frontend:
-    image: phact/openrag-frontend:latest
+    image: phact/openrag-frontend:${OPENRAG_VERSION:-latest}
     #build:
-      #context: .
-      #dockerfile: Dockerfile.frontend
+    #context: .
+    #dockerfile: Dockerfile.frontend
     container_name: openrag-frontend
     depends_on:
       - openrag-backend
@@ -87,7 +88,7 @@ services:
   langflow:
     volumes:
       - ./flows:/app/flows:Z
-    image: phact/langflow:responses
+    image: phact/langflow:${LANGFLOW_VERSION:-responses}
     container_name: langflow
     ports:
       - "7860:7860"
@@ -104,4 +105,4 @@ services:
       - LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
       - LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
       - LANGFLOW_NEW_USER_IS_ACTIVE=${LANGFLOW_NEW_USER_IS_ACTIVE}
-      - LANGFLOW_ENABLE_SUPERUSER_CLI=${LANGFLOW_ENABLE_SUPERUSER_CLI}
\ No newline at end of file
+      - LANGFLOW_ENABLE_SUPERUSER_CLI=${LANGFLOW_ENABLE_SUPERUSER_CLI}
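Both compose files now resolve their tags through Compose variable substitution: `${OPENRAG_VERSION:-latest}` reads `OPENRAG_VERSION` from the shell or `.env` and falls back to `latest`, so setting one variable pins the opensearch, backend, and frontend images to a single release, and `${LANGFLOW_VERSION:-responses}` does the same for the Langflow image. The new `NUDGES_FLOW_ID` is handed to the backend alongside `LANGFLOW_CHAT_FLOW_ID`. A hedged sketch of how a backend could trigger that flow through Langflow's documented `/api/v1/run/{flow_id}` endpoint — the URL variable, function name, and payload details here are assumptions, since OpenRAG's actual backend code is not shown in this diff:

```python
# Hypothetical trigger for the nudges flow. Only NUDGES_FLOW_ID and the
# /api/v1/run/{flow_id} endpoint shape come from this diff and Langflow's
# API docs; everything else is illustrative.
import os

import httpx

LANGFLOW_URL = os.environ.get("LANGFLOW_URL", "http://langflow:7860")  # assumed name
NUDGES_FLOW_ID = os.environ["NUDGES_FLOW_ID"]


def run_nudges_flow(message: str, session_id: str | None = None) -> dict:
    payload = {"input_value": message, "input_type": "chat", "output_type": "chat"}
    if session_id:
        payload["session_id"] = session_id
    resp = httpx.post(
        f"{LANGFLOW_URL}/api/v1/run/{NUDGES_FLOW_ID}",
        json=payload,
        # A Langflow API key would go in an "x-api-key" header if auth is enabled.
        timeout=60.0,
    )
    resp.raise_for_status()
    return resp.json()
```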
+ "sourceHandle": { + "dataType": "ParserComponent", + "id": "ParserComponent-tZs7s", + "name": "parsed_text", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "docs", + "id": "Prompt Template-Wo6kR", + "inputTypes": [ + "Message" + ], + "type": "str" + } + }, + "id": "xy-edge__ParserComponent-tZs7s{œdataTypeœ:œParserComponentœ,œidœ:œParserComponent-tZs7sœ,œnameœ:œparsed_textœ,œoutput_typesœ:[œMessageœ]}-Prompt Template-Wo6kR{œfieldNameœ:œdocsœ,œidœ:œPrompt Template-Wo6kRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ParserComponent-tZs7s", + "sourceHandle": "{œdataTypeœ:œParserComponentœ,œidœ:œParserComponent-tZs7sœ,œnameœ:œparsed_textœ,œoutput_typesœ:[œMessageœ]}", + "target": "Prompt Template-Wo6kR", + "targetHandle": "{œfieldNameœ:œdocsœ,œidœ:œPrompt Template-Wo6kRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "Prompt Template", + "id": "Prompt Template-Wo6kR", + "name": "prompt", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "input_value", + "id": "LanguageModelComponent-0YME7", + "inputTypes": [ + "Message" + ], + "type": "str" + } + }, + "id": "xy-edge__Prompt Template-Wo6kR{œdataTypeœ:œPrompt Templateœ,œidœ:œPrompt Template-Wo6kRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-LanguageModelComponent-0YME7{œfieldNameœ:œinput_valueœ,œidœ:œLanguageModelComponent-0YME7œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "Prompt Template-Wo6kR", + "sourceHandle": "{œdataTypeœ:œPrompt Templateœ,œidœ:œPrompt Template-Wo6kRœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", + "target": "LanguageModelComponent-0YME7", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œLanguageModelComponent-0YME7œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + }, + { + "animated": false, + "className": "", + "data": { + "sourceHandle": { + "dataType": "OpenSearchHybrid", + "id": "OpenSearch-iYfjf", + "name": "dataframe", + "output_types": [ + "DataFrame" + ] + }, + "targetHandle": { + "fieldName": "input_data", + "id": "ParserComponent-tZs7s", + "inputTypes": [ + "DataFrame", + "Data" + ], + "type": "other" + } + }, + "id": "xy-edge__OpenSearch-iYfjf{œdataTypeœ:œOpenSearchHybridœ,œidœ:œOpenSearch-iYfjfœ,œnameœ:œdataframeœ,œoutput_typesœ:[œDataFrameœ]}-ParserComponent-tZs7s{œfieldNameœ:œinput_dataœ,œidœ:œParserComponent-tZs7sœ,œinputTypesœ:[œDataFrameœ,œDataœ],œtypeœ:œotherœ}", + "selected": false, + "source": "OpenSearch-iYfjf", + "sourceHandle": "{œdataTypeœ:œOpenSearchHybridœ,œidœ:œOpenSearch-iYfjfœ,œnameœ:œdataframeœ,œoutput_typesœ:[œDataFrameœ]}", + "target": "ParserComponent-tZs7s", + "targetHandle": "{œfieldNameœ:œinput_dataœ,œidœ:œParserComponent-tZs7sœ,œinputTypesœ:[œDataFrameœ,œDataœ],œtypeœ:œotherœ}" + }, + { + "animated": false, + "data": { + "sourceHandle": { + "dataType": "ChatInput", + "id": "ChatInput-bqH7H", + "name": "message", + "output_types": [ + "Message" + ] + }, + "targetHandle": { + "fieldName": "prompt", + "id": "Prompt Template-Wo6kR", + "inputTypes": [ + "Message" + ], + "type": "str" + } + }, + "id": "xy-edge__ChatInput-bqH7H{œdataTypeœ:œChatInputœ,œidœ:œChatInput-bqH7Hœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt Template-Wo6kR{œfieldNameœ:œpromptœ,œidœ:œPrompt Template-Wo6kRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", + "selected": false, + "source": "ChatInput-bqH7H", + "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-bqH7Hœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", + 
"target": "Prompt Template-Wo6kR", + "targetHandle": "{œfieldNameœ:œpromptœ,œidœ:œPrompt Template-Wo6kRœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}" + } + ], + "nodes": [ + { + "data": { + "id": "ChatInput-bqH7H", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "category": "inputs", + "conditional_paths": [], + "custom_fields": {}, + "description": "Get chat inputs from the Playground.", + "display_name": "Chat Input", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "files", + "background_color", + "chat_icon", + "text_color" + ], + "frozen": false, + "icon": "MessagesSquare", + "key": "ChatInput", + "legacy": false, + "lf_version": "1.5.0.post1", + "metadata": {}, + "minimized": true, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Chat Message", + "group_outputs": false, + "method": "message_response", + "name": "message", + "selected": "Message", + "tool_mode": true, + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "score": 0.0020353564437605998, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs.inputs import BoolInput\nfrom langflow.io import (\n DropdownInput,\n FileInput,\n MessageTextInput,\n MultilineInput,\n Output,\n)\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_USER,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n documentation: str = \"https://docs.langflow.org/components-io#chat-input\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n minimized = True\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n input_types=[],\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n 
advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n temp_file=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Chat Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\n \"background_color\": background_color,\n \"text_color\": text_color,\n \"icon\": icon,\n },\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" + }, + "files": { + "_input_type": "FileInput", + "advanced": true, + "display_name": "Files", + "dynamic": false, + "fileTypes": [ + "txt", + "md", + "mdx", + "csv", + "json", + "yaml", + "yml", + "xml", + "html", + "htm", + "pdf", + "docx", + "py", + "sh", + "sql", + "js", + "ts", + "tsx", + "jpg", + "jpeg", + "png", + "bmp", + "image" + ], + "file_path": "", + "info": "Files to be sent with the message.", + "list": true, + "list_add_label": "Add More", + "name": "files", + "placeholder": "", + "required": false, + "show": true, + "temp_file": true, + "title_case": false, + "trace_as_metadata": true, + "type": "file", + "value": "" + }, + "input_value": { + "_input_type": "MultilineInput", + "advanced": false, + "copy_field": false, + "display_name": "Input Text", + "dynamic": false, + "info": "Message to be passed as input.", + "input_types": [], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "multiline": true, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": [ + "Machine", + "User" + ], + "options_metadata": [], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + 
"trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "User" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. If empty, the current session ID parameter will be used.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "list_add_label": "Add More", + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "selected_output": "message", + "showNode": false, + "type": "ChatInput" + }, + "dragging": false, + "id": "ChatInput-bqH7H", + "measured": { + "height": 48, + "width": 192 + }, + "position": { + "x": 1241.018940999428, + "y": 1088.6203941617916 + }, + "selected": false, + "type": "genericNode" + }, + { + "data": { + "id": "ChatOutput-BMVN5", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "category": "outputs", + "conditional_paths": [], + "custom_fields": {}, + "description": "Display a chat message in the Playground.", + "display_name": "Chat Output", + "documentation": "", + "edited": false, + "field_order": [ + "input_value", + "should_store_message", + "sender", + "sender_name", + "session_id", + "data_template", + "background_color", + "chat_icon", + "text_color", + "clean_data" + ], + "frozen": false, + "icon": "MessagesSquare", + "key": "ChatOutput", + "legacy": false, + "lf_version": "1.5.0.post1", + "metadata": {}, + "minimized": true, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Output Message", + "group_outputs": false, + "method": "message_response", + "name": "message", + "selected": "Message", + "tool_mode": true, + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "score": 0.003169567463043492, + "template": { + "_type": "Component", + "background_color": { + "_input_type": "MessageTextInput", + "advanced": true, + 
"display_name": "Background Color", + "dynamic": false, + "info": "The background color of the icon.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "background_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "chat_icon": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Icon", + "dynamic": false, + "info": "The icon of the message.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "chat_icon", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "clean_data": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Basic Clean Data", + "dynamic": false, + "info": "Whether to clean the data", + "list": false, + "list_add_label": "Add More", + "name": "clean_data", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from collections.abc import Generator\nfrom typing import Any\n\nimport orjson\nfrom fastapi.encoders import jsonable_encoder\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.helpers.data import safe_convert\nfrom langflow.inputs.inputs import BoolInput, DropdownInput, HandleInput, MessageTextInput\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.template.field.base import Output\nfrom langflow.utils.constants import (\n MESSAGE_SENDER_AI,\n MESSAGE_SENDER_NAME_AI,\n MESSAGE_SENDER_USER,\n)\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n documentation: str = \"https://docs.langflow.org/components-io#chat-output\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n minimized = True\n\n inputs = [\n HandleInput(\n name=\"input_value\",\n display_name=\"Inputs\",\n info=\"Message to be passed as output.\",\n input_types=[\"Data\", \"DataFrame\", \"Message\"],\n required=True,\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n BoolInput(\n name=\"clean_data\",\n display_name=\"Basic Clean Data\",\n value=True,\n info=\"Whether to clean the data\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Output Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n # Handle case where source is a ChatOpenAI object\n if hasattr(source, \"model_name\"):\n source_dict[\"source\"] = source.model_name\n elif hasattr(source, \"model\"):\n source_dict[\"source\"] = str(source.model)\n else:\n source_dict[\"source\"] = str(source)\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n # First convert the input to string if needed\n text = self.convert_to_string()\n\n # Get source properties\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n\n # Create or use existing Message object\n if isinstance(self.input_value, Message):\n message = self.input_value\n # Update message properties\n message.text = text\n else:\n message = Message(text=text)\n\n # Set message properties\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n\n # Store message if needed\n if self.session_id and self.should_store_message:\n stored_message = await self.send_message(message)\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n\n def _serialize_data(self, data: Data) -> str:\n \"\"\"Serialize Data object to JSON string.\"\"\"\n # Convert data.data to JSON-serializable format\n serializable_data = jsonable_encoder(data.data)\n # Serialize with orjson, enabling pretty printing with indentation\n json_bytes = orjson.dumps(serializable_data, option=orjson.OPT_INDENT_2)\n # Convert bytes to string and wrap in Markdown code blocks\n return \"```json\\n\" + json_bytes.decode(\"utf-8\") + \"\\n```\"\n\n def _validate_input(self) -> None:\n \"\"\"Validate the input data and raise ValueError if invalid.\"\"\"\n if self.input_value is None:\n msg = \"Input data cannot be None\"\n raise ValueError(msg)\n if isinstance(self.input_value, list) and not all(\n isinstance(item, Message | Data | 
DataFrame | str) for item in self.input_value\n ):\n invalid_types = [\n type(item).__name__\n for item in self.input_value\n if not isinstance(item, Message | Data | DataFrame | str)\n ]\n msg = f\"Expected Data or DataFrame or Message or str, got {invalid_types}\"\n raise TypeError(msg)\n if not isinstance(\n self.input_value,\n Message | Data | DataFrame | str | list | Generator | type(None),\n ):\n type_name = type(self.input_value).__name__\n msg = f\"Expected Data or DataFrame or Message or str, Generator or None, got {type_name}\"\n raise TypeError(msg)\n\n def convert_to_string(self) -> str | Generator[Any, None, None]:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n self._validate_input()\n if isinstance(self.input_value, list):\n return \"\\n\".join([safe_convert(item, clean_data=self.clean_data) for item in self.input_value])\n if isinstance(self.input_value, Generator):\n return self.input_value\n return safe_convert(self.input_value)\n" + }, + "data_template": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Data Template", + "dynamic": false, + "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "data_template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "input_value": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Inputs", + "dynamic": false, + "info": "Message to be passed as output.", + "input_types": [ + "Data", + "DataFrame", + "Message" + ], + "list": false, + "list_add_label": "Add More", + "name": "input_value", + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "sender": { + "_input_type": "DropdownInput", + "advanced": true, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Sender Type", + "dynamic": false, + "info": "Type of sender.", + "name": "sender", + "options": [ + "Machine", + "User" + ], + "options_metadata": [], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Machine" + }, + "sender_name": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Sender Name", + "dynamic": false, + "info": "Name of the sender.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "sender_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "AI" + }, + "session_id": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Session ID", + "dynamic": false, + "info": "The session ID of the chat. 
If empty, the current session ID parameter will be used.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "session_id", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "should_store_message": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Store Messages", + "dynamic": false, + "info": "Store the message in the history.", + "list": false, + "list_add_label": "Add More", + "name": "should_store_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "text_color": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Text Color", + "dynamic": false, + "info": "The text color of the name", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "text_color", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "showNode": false, + "type": "ChatOutput" + }, + "dragging": false, + "id": "ChatOutput-BMVN5", + "measured": { + "height": 48, + "width": 192 + }, + "position": { + "x": 2642.8863560882705, + "y": 740.3042509819792 + }, + "selected": false, + "type": "genericNode" + }, + { + "data": { + "id": "OpenSearch-iYfjf", + "node": { + "base_classes": [ + "Data", + "DataFrame", + "VectorStore" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Hybrid search: KNN + keyword, with optional filters, min_score, and aggregations.", + "display_name": "OpenSearch (Hybrid)", + "documentation": "", + "edited": true, + "field_order": [ + "opensearch_url", + "index_name", + "ingest_data", + "search_query", + "should_cache_vector_store", + "embedding", + "vector_field", + "number_of_results", + "filter_expression", + "auth_mode", + "username", + "password", + "jwt_token", + "jwt_header", + "bearer_prefix", + "use_ssl", + "verify_certs" + ], + "frozen": false, + "icon": "OpenSearch", + "last_updated": "2025-09-04T19:50:05.460Z", + "legacy": false, + "lf_version": "1.5.0.post2", + "metadata": {}, + "minimized": false, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Search Results", + "group_outputs": false, + "hidden": null, + "method": "search_documents", + "name": "search_results", + "options": null, + "required_inputs": null, + "tool_mode": true, + "types": [ + "Data" + ], + "value": "__UNDEFINED__" + }, + { + "allows_loop": false, + "cache": true, + "display_name": "DataFrame", + "group_outputs": false, + "hidden": null, + "method": "as_dataframe", + "name": "dataframe", + "options": null, + "required_inputs": null, + "selected": "DataFrame", + "tool_mode": true, + "types": [ + "DataFrame" + ], + "value": "__UNDEFINED__" + }, + { + "allows_loop": false, + "cache": true, + "display_name": "Vector Store Connection", + "group_outputs": false, + "hidden": true, + "method": "as_vector_store", + "name": "vectorstoreconnection", + "options": null, + "required_inputs": null, + "tool_mode": true, + "types": [ + "VectorStore" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + 
"_type": "Component", + "auth_mode": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Auth Mode", + "dynamic": false, + "info": "Choose Basic (username/password) or JWT (Bearer token).", + "load_from_db": false, + "name": "auth_mode", + "options": [ + "basic", + "jwt" + ], + "options_metadata": [], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "toggle": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "jwt" + }, + "bearer_prefix": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Prefix 'Bearer '", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "bearer_prefix", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from __future__ import annotations\n\nimport json\nfrom typing import Any, Dict, List\n\nfrom opensearchpy import OpenSearch, helpers\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.base.vectorstores.vector_store_connection_decorator import vector_store_connection\nfrom langflow.io import (\n BoolInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n DropdownInput,\n)\nfrom langflow.schema.data import Data\n\n\n@vector_store_connection\nclass OpenSearchHybridComponent(LCVectorStoreComponent):\n \"\"\"OpenSearch hybrid search: KNN (k=10, boost=0.7) + multi_match (boost=0.3) with optional filters & min_score.\"\"\"\n display_name: str = \"OpenSearch (Hybrid)\"\n name: str = \"OpenSearchHybrid\"\n icon: str = \"OpenSearch\"\n description: str = \"Hybrid search: KNN + keyword, with optional filters, min_score, and aggregations.\"\n\n # Keys we consider baseline\n default_keys: list[str] = [\n \"opensearch_url\",\n \"index_name\",\n *[i.name for i in LCVectorStoreComponent.inputs], # search_query, add_documents, etc.\n \"embedding\",\n \"vector_field\",\n \"number_of_results\",\n \"auth_mode\",\n \"username\",\n \"password\",\n \"jwt_token\",\n \"jwt_header\",\n \"bearer_prefix\",\n \"use_ssl\",\n \"verify_certs\",\n \"filter_expression\",\n ]\n\n inputs = [\n StrInput(\n name=\"opensearch_url\",\n display_name=\"OpenSearch URL\",\n value=\"http://localhost:9200\",\n info=\"URL for your OpenSearch cluster.\"\n ),\n StrInput(\n name=\"index_name\",\n display_name=\"Index Name\",\n value=\"langflow\",\n info=\"The index to search.\"\n ),\n *LCVectorStoreComponent.inputs, # includes search_query, add_documents, etc.\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding\",\n input_types=[\"Embeddings\"]\n ),\n StrInput(\n name=\"vector_field\",\n display_name=\"Vector Field\",\n value=\"chunk_embedding\",\n advanced=True,\n info=\"Vector field used for KNN.\"\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Default Size (limit)\",\n value=10,\n advanced=True,\n info=\"Default number of hits when no limit provided in filter_expression.\"\n ),\n MultilineInput(\n name=\"filter_expression\",\n display_name=\"Filter 
Expression (JSON)\",\n value=\"\",\n info=(\n \"Optional JSON to control filters/limit/score threshold.\\n\"\n \"Accepted shapes:\\n\"\n '1) {\"filter\": [ {\"term\": {\"filename\":\"foo\"}}, {\"terms\":{\"owner\":[\"u1\",\"u2\"]}} ], \"limit\": 10, \"score_threshold\": 1.6 }\\n'\n '2) Context-style maps: {\"data_sources\":[\"fileA\"], \"document_types\":[\"application/pdf\"], \"owners\":[\"123\"]}\\n'\n \"Placeholders with __IMPOSSIBLE_VALUE__ are ignored.\"\n )\n ),\n\n # ----- Auth controls (dynamic) -----\n DropdownInput(\n name=\"auth_mode\",\n display_name=\"Auth Mode\",\n value=\"basic\",\n options=[\"basic\", \"jwt\"],\n info=\"Choose Basic (username/password) or JWT (Bearer token).\",\n real_time_refresh=True,\n advanced=False,\n ),\n StrInput(\n name=\"username\",\n display_name=\"Username\",\n value=\"admin\",\n show=True,\n ),\n SecretStrInput(\n name=\"password\",\n display_name=\"Password\",\n value=\"admin\",\n show=True,\n ),\n SecretStrInput(\n name=\"jwt_token\",\n display_name=\"JWT Token\",\n value=\"\",\n show=False,\n info=\"Paste a valid JWT (sent as a header).\",\n ),\n StrInput(\n name=\"jwt_header\",\n display_name=\"JWT Header Name\",\n value=\"Authorization\",\n show=False,\n advanced=True,\n ),\n BoolInput(\n name=\"bearer_prefix\",\n display_name=\"Prefix 'Bearer '\",\n value=True,\n show=False,\n advanced=True,\n ),\n\n # ----- TLS -----\n BoolInput(\n name=\"use_ssl\",\n display_name=\"Use SSL\",\n value=True,\n advanced=True\n ),\n BoolInput(\n name=\"verify_certs\",\n display_name=\"Verify Certificates\",\n value=False,\n advanced=True\n ),\n ]\n\n # ---------- auth / client ----------\n def _build_auth_kwargs(self) -> Dict[str, Any]:\n mode = (self.auth_mode or \"basic\").strip().lower()\n if mode == \"jwt\":\n token = (self.jwt_token or \"\").strip()\n if not token:\n raise ValueError(\"Auth Mode is 'jwt' but no jwt_token was provided.\")\n header_name = (self.jwt_header or \"Authorization\").strip()\n header_value = f\"Bearer {token}\" if self.bearer_prefix else token\n return {\"headers\": {header_name: header_value}}\n user = (self.username or \"\").strip()\n pwd = (self.password or \"\").strip()\n if not user or not pwd:\n raise ValueError(\"Auth Mode is 'basic' but username/password are missing.\")\n return {\"http_auth\": (user, pwd)}\n\n def build_client(self) -> OpenSearch:\n auth_kwargs = self._build_auth_kwargs()\n return OpenSearch(\n hosts=[self.opensearch_url],\n use_ssl=self.use_ssl,\n verify_certs=self.verify_certs,\n ssl_assert_hostname=False,\n ssl_show_warn=False,\n **auth_kwargs,\n )\n\n @check_cached_vector_store\n def build_vector_store(self) -> OpenSearch:\n # Return raw OpenSearch client as our “vector store.”\n return self.build_client()\n\n # ---------- ingest ----------\n def _add_documents_to_vector_store(self, client: OpenSearch) -> None:\n docs = self._prepare_ingest_data() or []\n if not docs:\n self.log(\"No documents to ingest.\")\n return\n\n texts = [d.to_lc_document().page_content for d in docs]\n if not self.embedding:\n raise ValueError(\"Embedding handle is required to embed documents.\")\n vectors = self.embedding.embed_documents(texts)\n\n actions = []\n for doc_obj, vec in zip(docs, vectors):\n lc_doc = doc_obj.to_lc_document()\n body = {\n **lc_doc.metadata,\n \"text\": lc_doc.page_content,\n self.vector_field: vec,\n }\n actions.append({\n \"_op_type\": \"index\",\n \"_index\": self.index_name,\n \"_source\": body,\n })\n\n self.log(f\"Indexing {len(actions)} docs into '{self.index_name}'…\")\n 
helpers.bulk(client, actions)\n\n # ---------- helpers for filters ----------\n def _is_placeholder_term(self, term_obj: dict) -> bool:\n # term_obj like {\"filename\": \"__IMPOSSIBLE_VALUE__\"}\n return any(v == \"__IMPOSSIBLE_VALUE__\" for v in term_obj.values())\n\n def _coerce_filter_clauses(self, filter_obj: dict | None) -> List[dict]:\n \"\"\"\n Accepts either:\n A) {\"filter\":[ ...term/terms objects... ], \"limit\":..., \"score_threshold\":...}\n B) Context-style: {\"data_sources\":[...], \"document_types\":[...], \"owners\":[...]}\n Returns a list of OS filter clauses (term/terms), skipping placeholders and empty terms.\n \"\"\"\n \n if not filter_obj:\n return []\n\n # If it’s a string, try to parse it once\n if isinstance(filter_obj, str):\n try:\n filter_obj = json.loads(filter_obj)\n except Exception:\n # Not valid JSON → treat as no filters\n return []\n \n # Case A: already an explicit list/dict under \"filter\"\n if \"filter\" in filter_obj:\n raw = filter_obj[\"filter\"]\n if isinstance(raw, dict):\n raw = [raw]\n clauses: List[dict] = []\n for f in (raw or []):\n if \"term\" in f and isinstance(f[\"term\"], dict) and not self._is_placeholder_term(f[\"term\"]):\n clauses.append(f)\n elif \"terms\" in f and isinstance(f[\"terms\"], dict):\n field, vals = next(iter(f[\"terms\"].items()))\n if isinstance(vals, list) and len(vals) > 0:\n clauses.append(f)\n return clauses\n\n # Case B: convert context-style maps into clauses\n field_mapping = {\"data_sources\": \"filename\", \"document_types\": \"mimetype\", \"owners\": \"owner\"}\n print(f\"filter_obj {filter_obj}\")\n clauses: List[dict] = []\n for k, values in filter_obj.items():\n if not isinstance(values, list):\n continue\n field = field_mapping.get(k, k)\n if len(values) == 0:\n # Match-nothing placeholder (kept to mirror your tool semantics)\n clauses.append({\"term\": {field: \"__IMPOSSIBLE_VALUE__\"}})\n elif len(values) == 1:\n if values[0] != \"__IMPOSSIBLE_VALUE__\":\n clauses.append({\"term\": {field: values[0]}})\n else:\n clauses.append({\"terms\": {field: values}})\n return clauses\n\n # ---------- search (single hybrid path matching your tool) ----------\n def search(self, query: str | None = None) -> list[dict[str, Any]]:\n print(\"search method\")\n client = self.build_client()\n q = (query or \"\").strip()\n\n # Parse optional filter expression (can be either A or B shape; see _coerce_filter_clauses)\n filter_obj = None\n print(f\"DEBUG q {q}\")\n if getattr(self, \"filter_expression\", \"\") and self.filter_expression.strip():\n try:\n self.log(f\"DEBUG FILTER EXPRESSION {self.filter_expression}\")\n except json.JSONDecodeError as e:\n raise ValueError(f\"Invalid filter_expression JSON: {e}\") from e\n\n if not self.embedding:\n raise ValueError(\"Embedding is required to run hybrid search (KNN + keyword).\")\n\n # Embed the query\n vec = self.embedding.embed_query(q)\n\n # Build filter clauses (accept both shapes)\n clauses = self._coerce_filter_clauses(filter_obj)\n\n # Respect the tool's limit/threshold defaults\n limit = (filter_obj or {}).get(\"limit\", self.number_of_results)\n score_threshold = (filter_obj or {}).get(\"score_threshold\", 0)\n\n print(f\"DEBUG clauses {clauses}\")\n # Build the same hybrid body as your SearchService\n body = {\n \"query\": {\n \"bool\": {\n \"should\": [\n {\n \"knn\": {\n self.vector_field: {\n \"vector\": vec,\n \"k\": 10, # fixed to match the tool\n \"boost\": 0.7\n }\n }\n },\n {\n \"multi_match\": {\n \"query\": q,\n \"fields\": [\"text^2\", 
\"filename^1.5\"],\n \"type\": \"best_fields\",\n \"fuzziness\": \"AUTO\",\n \"boost\": 0.3\n }\n }\n ],\n \"minimum_should_match\": 1\n }\n },\n \"aggs\": {\n \"data_sources\": {\"terms\": {\"field\": \"filename\", \"size\": 20}},\n \"document_types\": {\"terms\": {\"field\": \"mimetype\", \"size\": 10}},\n \"owners\": {\"terms\": {\"field\": \"owner\", \"size\": 10}}\n },\n \"_source\": [\n \"filename\", \"mimetype\", \"page\", \"text\", \"source_url\",\n \"owner\", \"allowed_users\", \"allowed_groups\"\n ],\n \"size\": limit\n }\n print(f\"DEBUG BODY {body}\")\n if clauses:\n body[\"query\"][\"bool\"][\"filter\"] = clauses\n\n if isinstance(score_threshold, (int, float)) and score_threshold > 0:\n # top-level min_score (matches your tool)\n body[\"min_score\"] = score_threshold\n\n print(f\"DEBUG HYBRID BODY {body}\")\n resp = client.search(index=self.index_name, body=body)\n hits = resp.get(\"hits\", {}).get(\"hits\", [])\n return [\n {\n \"page_content\": hit[\"_source\"].get(\"text\", \"\"),\n \"metadata\": {k: v for k, v in hit[\"_source\"].items() if k != \"text\"},\n \"score\": hit.get(\"_score\"),\n }\n for hit in hits\n ]\n\n def search_documents(self) -> list[Data]:\n try:\n raw = self.search(self.search_query or \"\")\n return [\n Data(file_path=hit[\"metadata\"].get(\"file_path\", \"\"), text=hit[\"page_content\"])\n for hit in raw\n ]\n except Exception as e:\n print(f\"ERROR search_documents: {e}\")\n self.log(f\"search_documents error: {e}\")\n raise\n\n # -------- dynamic UI handling (auth switch) --------\n async def update_build_config(self, build_config: dict, field_value: str, field_name: str | None = None) -> dict:\n try:\n if field_name == \"auth_mode\":\n mode = (field_value or \"basic\").strip().lower()\n is_basic = mode == \"basic\"\n is_jwt = mode == \"jwt\"\n\n build_config[\"username\"][\"show\"] = is_basic\n build_config[\"password\"][\"show\"] = is_basic\n\n build_config[\"jwt_token\"][\"show\"] = is_jwt\n build_config[\"jwt_header\"][\"show\"] = is_jwt\n build_config[\"bearer_prefix\"][\"show\"] = is_jwt\n\n build_config[\"username\"][\"required\"] = is_basic\n build_config[\"password\"][\"required\"] = is_basic\n\n build_config[\"jwt_token\"][\"required\"] = is_jwt\n build_config[\"jwt_header\"][\"required\"] = is_jwt\n build_config[\"bearer_prefix\"][\"required\"] = False\n\n if is_basic:\n build_config[\"jwt_token\"][\"value\"] = \"\"\n\n return build_config\n\n return build_config\n\n except Exception as e:\n self.log(f\"update_build_config error: {e}\")\n return build_config\n" + }, + "embedding": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Embedding", + "dynamic": false, + "info": "", + "input_types": [ + "Embeddings" + ], + "list": false, + "list_add_label": "Add More", + "name": "embedding", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "filter_expression": { + "_input_type": "MultilineInput", + "advanced": false, + "copy_field": false, + "display_name": "Filter Expression (JSON)", + "dynamic": false, + "info": "Optional JSON to control filters/limit/score threshold.\nAccepted shapes:\n1) {\"filter\": [ {\"term\": {\"filename\":\"foo\"}}, {\"terms\":{\"owner\":[\"u1\",\"u2\"]}} ], \"limit\": 10, \"score_threshold\": 1.6 }\n2) Context-style maps: {\"data_sources\":[\"fileA\"], \"document_types\":[\"application/pdf\"], \"owners\":[\"123\"]}\nPlaceholders with __IMPOSSIBLE_VALUE__ are ignored.", + "input_types": [ + 
"Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "multiline": true, + "name": "filter_expression", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "index_name": { + "_input_type": "StrInput", + "advanced": false, + "display_name": "Index Name", + "dynamic": false, + "info": "The index to search.", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "index_name", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "documents" + }, + "ingest_data": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Ingest Data", + "dynamic": false, + "info": "", + "input_types": [ + "Data", + "DataFrame" + ], + "list": true, + "list_add_label": "Add More", + "name": "ingest_data", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "jwt_header": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "JWT Header Name", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "jwt_header", + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "Authorization" + }, + "jwt_token": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "JWT Token", + "dynamic": false, + "info": "Paste a valid JWT (sent as a header).", + "input_types": [], + "load_from_db": true, + "name": "jwt_token", + "password": true, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "JWT" + }, + "number_of_results": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Default Size (limit)", + "dynamic": false, + "info": "Default number of hits when no limit provided in filter_expression.", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "number_of_results", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": 4 + }, + "opensearch_url": { + "_input_type": "StrInput", + "advanced": false, + "display_name": "OpenSearch URL", + "dynamic": false, + "info": "URL for your OpenSearch cluster.", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "opensearch_url", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "https://opensearch:9200" + }, + "password": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "Password", + "dynamic": false, + "info": "", + "input_types": [], + "load_from_db": false, + "name": "password", + "password": true, + "placeholder": "", + "required": false, + "show": false, + "title_case": false, + "type": "str", + "value": "o8h@xwLus123o" + }, + "search_query": { + "_input_type": "QueryInput", + "advanced": false, + "display_name": "Search Query", + "dynamic": false, + "info": "Enter a query to run a similarity search.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + 
"load_from_db": false, + "name": "search_query", + "placeholder": "Enter a query...", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "query", + "value": "" + }, + "should_cache_vector_store": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Cache Vector Store", + "dynamic": false, + "info": "If True, the vector store will be cached for the current build of the component. This is useful for components that have multiple output methods and want to share the same vector store.", + "list": false, + "list_add_label": "Add More", + "name": "should_cache_vector_store", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "use_ssl": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Use SSL", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "use_ssl", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": true + }, + "username": { + "_input_type": "StrInput", + "advanced": false, + "display_name": "Username", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "username", + "placeholder": "", + "required": false, + "show": false, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "admin" + }, + "vector_field": { + "_input_type": "StrInput", + "advanced": true, + "display_name": "Vector Field", + "dynamic": false, + "info": "Vector field used for KNN.", + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "vector_field", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "chunk_embedding" + }, + "verify_certs": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Verify Certificates", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "verify_certs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + } + }, + "tool_mode": false + }, + "selected_output": "dataframe", + "showNode": true, + "type": "OpenSearchHybrid" + }, + "dragging": false, + "id": "OpenSearch-iYfjf", + "measured": { + "height": 765, + "width": 320 + }, + "position": { + "x": 876.8213370559117, + "y": 136.89961010992386 + }, + "selected": false, + "type": "genericNode" + }, + { + "data": { + "id": "EmbeddingModel-eZ6bT", + "node": { + "base_classes": [ + "Embeddings" + ], + "beta": false, + "category": "models", + "conditional_paths": [], + "custom_fields": {}, + "description": "Generate embeddings using a specified provider.", + "display_name": "Embedding Model", + "documentation": "https://docs.langflow.org/components-embedding-models", + "edited": false, + "field_order": [ + "provider", + "model", + "api_key", + "api_base", + "dimensions", + "chunk_size", + "request_timeout", + "max_retries", + "show_progress_bar", + "model_kwargs" + ], + "frozen": false, + "icon": "binary", + "key": "EmbeddingModel", + "last_updated": "2025-09-04T20:03:12.867Z", + "legacy": false, + "lf_version": "1.5.0.post2", + 
"metadata": {}, + "minimized": false, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Embedding Model", + "group_outputs": false, + "method": "build_embeddings", + "name": "embeddings", + "options": null, + "required_inputs": null, + "selected": "Embeddings", + "tool_mode": true, + "types": [ + "Embeddings" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "score": 0.002833550413469189, + "template": { + "_type": "Component", + "api_base": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "API Base URL", + "dynamic": false, + "info": "Base URL for the API. Leave empty for default.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "api_base", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "Model Provider API key", + "input_types": [], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "real_time_refresh": true, + "required": true, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "chunk_size": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Chunk Size", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "chunk_size", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": 1000 + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom langflow.base.embeddings.model import LCEmbeddingsModel\nfrom langflow.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom langflow.field_typing import Embeddings\nfrom langflow.io import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageTextInput,\n SecretStrInput,\n)\nfrom langflow.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n display_name = \"Embedding Model\"\n description = \"Generate embeddings using a specified provider.\"\n documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n icon = \"binary\"\n name = \"EmbeddingModel\"\n category = \"models\"\n\n inputs = [\n DropdownInput(\n name=\"provider\",\n display_name=\"Model Provider\",\n options=[\"OpenAI\"],\n value=\"OpenAI\",\n info=\"Select the embedding model provider\",\n real_time_refresh=True,\n options_metadata=[{\"icon\": \"OpenAI\"}],\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n info=\"Select the embedding model to use\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"Model Provider API key\",\n required=True,\n show=True,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"api_base\",\n display_name=\"API Base 
URL\",\n info=\"Base URL for the API. Leave empty for default.\",\n advanced=True,\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. \"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n provider = self.provider\n model = self.model\n api_key = self.api_key\n api_base = self.api_base\n dimensions = self.dimensions\n chunk_size = self.chunk_size\n request_timeout = self.request_timeout\n max_retries = self.max_retries\n show_progress_bar = self.show_progress_bar\n model_kwargs = self.model_kwargs or {}\n\n if provider == \"OpenAI\":\n if not api_key:\n msg = \"OpenAI API key is required when using OpenAI provider\"\n raise ValueError(msg)\n return OpenAIEmbeddings(\n model=model,\n dimensions=dimensions or None,\n base_url=api_base or None,\n api_key=api_key,\n chunk_size=chunk_size,\n max_retries=max_retries,\n timeout=request_timeout or None,\n show_progress_bar=show_progress_bar,\n model_kwargs=model_kwargs,\n )\n msg = f\"Unknown provider: {provider}\"\n raise ValueError(msg)\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n if field_name == \"provider\" and field_value == \"OpenAI\":\n build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n return build_config\n" + }, + "dimensions": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Dimensions", + "dynamic": false, + "info": "The number of dimensions the resulting output embeddings should have. 
Only supported by certain models.", + "list": false, + "list_add_label": "Add More", + "name": "dimensions", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": "" + }, + "max_retries": { + "_input_type": "IntInput", + "advanced": true, + "display_name": "Max Retries", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "max_retries", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "int", + "value": 3 + }, + "model": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Model Name", + "dynamic": false, + "info": "Select the embedding model to use", + "name": "model", + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "options_metadata": [], + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "toggle": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "text-embedding-3-small" + }, + "model_kwargs": { + "_input_type": "DictInput", + "advanced": true, + "display_name": "Model Kwargs", + "dynamic": false, + "info": "Additional keyword arguments to pass to the model.", + "list": false, + "list_add_label": "Add More", + "name": "model_kwargs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "dict", + "value": {} + }, + "provider": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Model Provider", + "dynamic": false, + "info": "Select the embedding model provider", + "name": "provider", + "options": [ + "OpenAI" + ], + "options_metadata": [ + { + "icon": "OpenAI" + } + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "toggle": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "request_timeout": { + "_input_type": "FloatInput", + "advanced": true, + "display_name": "Request Timeout", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "request_timeout", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "float", + "value": "" + }, + "show_progress_bar": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Show Progress Bar", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "show_progress_bar", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + } + }, + "tool_mode": false + }, + "showNode": true, + "type": "EmbeddingModel" + }, + "dragging": false, + "id": "EmbeddingModel-eZ6bT", + "measured": { + "height": 369, + "width": 320 + }, + "position": { + "x": 470.83083284244424, + "y": 223.40210317688985 + }, + "selected": false, + "type": "genericNode" + }, + { + "data": { + "id": "LanguageModelComponent-0YME7", + "node": { + "base_classes": [ + "LanguageModel", + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Runs a language model given a specified 
provider.", + "display_name": "Language Model", + "documentation": "https://docs.langflow.org/components-models", + "edited": false, + "field_order": [ + "provider", + "model_name", + "api_key", + "input_value", + "system_message", + "stream", + "temperature" + ], + "frozen": false, + "icon": "brain-circuit", + "last_updated": "2025-09-04T20:03:12.869Z", + "legacy": false, + "metadata": { + "code_hash": "6ac42a7167a4", + "dependencies": { + "dependencies": [ + { + "name": "langchain_anthropic", + "version": "0.3.14" + }, + { + "name": "langchain_google_genai", + "version": "2.0.6" + }, + { + "name": "langchain_openai", + "version": "0.3.23" + }, + { + "name": "langflow", + "version": "0.5.0.post2" + } + ], + "total_dependencies": 4 + }, + "keywords": [ + "model", + "llm", + "language model", + "large language model" + ], + "module": "langflow.components.models.language_model.LanguageModelComponent" + }, + "minimized": false, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Model Response", + "group_outputs": false, + "method": "text_response", + "name": "text_output", + "options": null, + "required_inputs": null, + "selected": "Message", + "tool_mode": true, + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + }, + { + "allows_loop": false, + "cache": true, + "display_name": "Language Model", + "group_outputs": false, + "method": "build_model", + "name": "model_output", + "options": null, + "required_inputs": null, + "selected": "LanguageModel", + "tool_mode": true, + "types": [ + "LanguageModel" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "priority": 0, + "template": { + "_type": "Component", + "api_key": { + "_input_type": "SecretStrInput", + "advanced": false, + "display_name": "OpenAI API Key", + "dynamic": false, + "info": "Model Provider API key", + "input_types": [], + "load_from_db": true, + "name": "api_key", + "password": true, + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "OPENAI_API_KEY" + }, + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from typing import Any\n\nfrom langchain_anthropic import ChatAnthropic\nfrom langchain_google_genai import ChatGoogleGenerativeAI\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.google_generative_ai_constants import GOOGLE_GENERATIVE_AI_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_CHAT_MODEL_NAMES, OPENAI_REASONING_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MultilineInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass LanguageModelComponent(LCModelComponent):\n display_name = \"Language Model\"\n description = \"Runs a language model given a specified provider.\"\n documentation: str = \"https://docs.langflow.org/components-models\"\n icon = \"brain-circuit\"\n category = \"models\"\n priority = 0 # Set priority to 0 to make it appear 
first\n\n inputs = [\n DropdownInput(\n name=\"provider\",\n display_name=\"Model Provider\",\n options=[\"OpenAI\", \"Anthropic\", \"Google\"],\n value=\"OpenAI\",\n info=\"Select the model provider\",\n real_time_refresh=True,\n options_metadata=[{\"icon\": \"OpenAI\"}, {\"icon\": \"Anthropic\"}, {\"icon\": \"GoogleGenerativeAI\"}],\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n options=OPENAI_CHAT_MODEL_NAMES + OPENAI_REASONING_MODEL_NAMES,\n value=OPENAI_CHAT_MODEL_NAMES[0],\n info=\"Select the model to use\",\n real_time_refresh=True,\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"Model Provider API key\",\n required=False,\n show=True,\n real_time_refresh=True,\n ),\n MessageInput(\n name=\"input_value\",\n display_name=\"Input\",\n info=\"The input text to send to the model\",\n ),\n MultilineInput(\n name=\"system_message\",\n display_name=\"System Message\",\n info=\"A system message that helps set the behavior of the assistant\",\n advanced=False,\n ),\n BoolInput(\n name=\"stream\",\n display_name=\"Stream\",\n info=\"Whether to stream the response\",\n value=False,\n advanced=True,\n ),\n SliderInput(\n name=\"temperature\",\n display_name=\"Temperature\",\n value=0.1,\n info=\"Controls randomness in responses\",\n range_spec=RangeSpec(min=0, max=1, step=0.01),\n advanced=True,\n ),\n ]\n\n def build_model(self) -> LanguageModel:\n provider = self.provider\n model_name = self.model_name\n temperature = self.temperature\n stream = self.stream\n\n if provider == \"OpenAI\":\n if not self.api_key:\n msg = \"OpenAI API key is required when using OpenAI provider\"\n raise ValueError(msg)\n\n if model_name in OPENAI_REASONING_MODEL_NAMES:\n # reasoning models do not support temperature (yet)\n temperature = None\n\n return ChatOpenAI(\n model_name=model_name,\n temperature=temperature,\n streaming=stream,\n openai_api_key=self.api_key,\n )\n if provider == \"Anthropic\":\n if not self.api_key:\n msg = \"Anthropic API key is required when using Anthropic provider\"\n raise ValueError(msg)\n return ChatAnthropic(\n model=model_name,\n temperature=temperature,\n streaming=stream,\n anthropic_api_key=self.api_key,\n )\n if provider == \"Google\":\n if not self.api_key:\n msg = \"Google API key is required when using Google provider\"\n raise ValueError(msg)\n return ChatGoogleGenerativeAI(\n model=model_name,\n temperature=temperature,\n streaming=stream,\n google_api_key=self.api_key,\n )\n msg = f\"Unknown provider: {provider}\"\n raise ValueError(msg)\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n if field_name == \"provider\":\n if field_value == \"OpenAI\":\n build_config[\"model_name\"][\"options\"] = OPENAI_CHAT_MODEL_NAMES + OPENAI_REASONING_MODEL_NAMES\n build_config[\"model_name\"][\"value\"] = OPENAI_CHAT_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n elif field_value == \"Anthropic\":\n build_config[\"model_name\"][\"options\"] = ANTHROPIC_MODELS\n build_config[\"model_name\"][\"value\"] = ANTHROPIC_MODELS[0]\n build_config[\"api_key\"][\"display_name\"] = \"Anthropic API Key\"\n elif field_value == \"Google\":\n build_config[\"model_name\"][\"options\"] = GOOGLE_GENERATIVE_AI_MODELS\n build_config[\"model_name\"][\"value\"] = GOOGLE_GENERATIVE_AI_MODELS[0]\n build_config[\"api_key\"][\"display_name\"] = \"Google API Key\"\n elif field_name == \"model_name\" and field_value.startswith(\"o1\") and 
self.provider == \"OpenAI\":\n # Hide system_message for o1 models - currently unsupported\n if \"system_message\" in build_config:\n build_config[\"system_message\"][\"show\"] = False\n elif field_name == \"model_name\" and not field_value.startswith(\"o1\") and \"system_message\" in build_config:\n build_config[\"system_message\"][\"show\"] = True\n return build_config\n" + }, + "input_value": { + "_input_type": "MessageInput", + "advanced": false, + "display_name": "Input", + "dynamic": false, + "info": "The input text to send to the model", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "input_value", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "model_name": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Model Name", + "dynamic": false, + "info": "Select the model to use", + "name": "model_name", + "options": [ + "gpt-4o-mini", + "gpt-4o", + "gpt-4.1", + "gpt-4.1-mini", + "gpt-4.1-nano", + "gpt-4-turbo", + "gpt-4-turbo-preview", + "gpt-4", + "gpt-3.5-turbo", + "gpt-5", + "gpt-5-mini", + "gpt-5-nano", + "gpt-5-chat-latest", + "o1", + "o3-mini", + "o3", + "o3-pro", + "o4-mini", + "o4-mini-high" + ], + "options_metadata": [], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "toggle": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "gpt-4o-mini" + }, + "provider": { + "_input_type": "DropdownInput", + "advanced": false, + "combobox": false, + "dialog_inputs": {}, + "display_name": "Model Provider", + "dynamic": false, + "info": "Select the model provider", + "name": "provider", + "options": [ + "OpenAI", + "Anthropic", + "Google" + ], + "options_metadata": [ + { + "icon": "OpenAI" + }, + { + "icon": "Anthropic" + }, + { + "icon": "GoogleGenerativeAI" + } + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "toggle": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "str", + "value": "OpenAI" + }, + "stream": { + "_input_type": "BoolInput", + "advanced": true, + "display_name": "Stream", + "dynamic": false, + "info": "Whether to stream the response", + "list": false, + "list_add_label": "Add More", + "name": "stream", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "bool", + "value": false + }, + "system_message": { + "_input_type": "MultilineInput", + "advanced": false, + "copy_field": false, + "display_name": "System Message", + "dynamic": false, + "info": "A system message that helps set the behavior of the assistant", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "multiline": true, + "name": "system_message", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + }, + "temperature": { + "_input_type": "SliderInput", + "advanced": true, + "display_name": "Temperature", + "dynamic": false, + "info": "Controls randomness in responses", + "max_label": "", + "max_label_icon": "", + "min_label": "", + "min_label_icon": "", + 
"name": "temperature", + "placeholder": "", + "range_spec": { + "max": 1, + "min": 0, + "step": 0.01, + "step_type": "float" + }, + "required": false, + "show": true, + "slider_buttons": false, + "slider_buttons_options": [], + "slider_input": false, + "title_case": false, + "tool_mode": false, + "type": "slider", + "value": 0.1 + } + }, + "tool_mode": false + }, + "selected_output": "text_output", + "showNode": true, + "type": "LanguageModelComponent" + }, + "dragging": false, + "id": "LanguageModelComponent-0YME7", + "measured": { + "height": 534, + "width": 320 + }, + "position": { + "x": 2190.7561945382186, + "y": 449.90261812853623 + }, + "selected": false, + "type": "genericNode" + }, + { + "data": { + "id": "Prompt Template-Wo6kR", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": { + "template": [ + "prompt", + "docs" + ] + }, + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt Template", + "documentation": "https://docs.langflow.org/components-prompts", + "edited": false, + "error": null, + "field_order": [ + "template", + "tool_placeholder" + ], + "frozen": false, + "full_path": null, + "icon": "braces", + "is_composition": null, + "is_input": null, + "is_output": null, + "legacy": false, + "metadata": { + "code_hash": "c478a572ccd3", + "dependencies": { + "dependencies": [ + { + "name": "langflow", + "version": "0.5.0.post2" + } + ], + "total_dependencies": 1 + }, + "module": "langflow.components.processing.prompt.PromptComponent" + }, + "minimized": false, + "name": "", + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Prompt", + "group_outputs": false, + "hidden": null, + "method": "build_prompt", + "name": "prompt", + "options": null, + "required_inputs": null, + "selected": "Message", + "tool_mode": true, + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "priority": 0, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom.custom_component.component import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt Template\"\n description: str = \"Create a prompt template with dynamic variables.\"\n documentation: str = \"https://docs.langflow.org/components-prompts\"\n icon = \"braces\"\n trace_type = \"prompt\"\n name = \"Prompt Template\"\n priority = 0 # Set priority to 0 to make it appear first\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = 
prompt.text\n        return prompt\n\n    def _update_template(self, frontend_node: dict):\n        prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n        custom_fields = frontend_node[\"custom_fields\"]\n        frontend_node_template = frontend_node[\"template\"]\n        _ = process_prompt_template(\n            template=prompt_template,\n            name=\"template\",\n            custom_fields=custom_fields,\n            frontend_node_template=frontend_node_template,\n        )\n        return frontend_node\n\n    async def update_frontend_node(self, new_frontend_node: dict, current_frontend_node: dict):\n        \"\"\"This function is called after the code validation is done.\"\"\"\n        frontend_node = await super().update_frontend_node(new_frontend_node, current_frontend_node)\n        template = frontend_node[\"template\"][\"template\"][\"value\"]\n        # Kept it duplicated for backwards compatibility\n        _ = process_prompt_template(\n            template=template,\n            name=\"template\",\n            custom_fields=frontend_node[\"custom_fields\"],\n            frontend_node_template=frontend_node[\"template\"],\n        )\n        # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n        # and update the frontend_node with those values\n        update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n        return frontend_node\n\n    def _get_fallback_input(self, **kwargs):\n        return DefaultPromptField(**kwargs)\n"
+ }, + "docs": { + "advanced": false, + "display_name": "docs", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "docs", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + },
+ "prompt": { + "advanced": false, + "display_name": "prompt", + "dynamic": false, + "field_type": "str", + "fileTypes": [], + "file_path": "", + "info": "", + "input_types": [ + "Message" + ], + "list": false, + "load_from_db": false, + "multiline": true, + "name": "prompt", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "type": "str", + "value": "" + },
+ "template": { + "_input_type": "PromptInput", + "advanced": false, + "display_name": "Template", + "dynamic": false, + "info": "", + "list": false, + "list_add_label": "Add More", + "name": "template", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "type": "prompt",
+ "value": "You are generating prompt nudges to help a user explore a corpus.\n\nTask:\n1) Skim the documents to infer common themes, entities, or tasks.\n2) Propose exactly three concise, distinct prompt nudges that encourage useful next queries.\n3) If the chat history is provided, use it to generate new questions that the user might have, based on the LLM's response to the user's previous query. DO NOT repeat user questions.\n4) Make the nudges concise, close to 40 characters.\n5) The nudges are questions or commands that the user can send to the chatbot, which will answer by looking at the corpus.\n6) Return strings only, separating the nudges by a newline. Don't include quotation marks.\n7) If any error occurs, return blank. This will be used in production, so don't ask for more information or for confirmation as if you were still talking to me. If, for any reason, you can't provide the nudges, return blank.\nRules: Be brief. No duplicates. No explanations outside the strings of the nudges.
English only.\n\nExamples:\n Show me this quarter's top 10 deals\n Summarize recent client interactions\n Search OpenSearch for mentions of our competitors\n\nChat history:\n{prompt}\n\nDocuments:\n{docs}\n\n" + }, + "tool_placeholder": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Tool Placeholder", + "dynamic": false, + "info": "A placeholder input for tool mode.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "tool_placeholder", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": true, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "" + } + }, + "tool_mode": false + }, + "showNode": true, + "type": "Prompt Template" + }, + "dragging": false, + "id": "Prompt Template-Wo6kR", + "measured": { + "height": 449, + "width": 320 + }, + "position": { + "x": 1669.0365272581178, + "y": 712.1086273287026 + }, + "selected": true, + "type": "genericNode" + }, + { + "data": { + "id": "ParserComponent-tZs7s", + "node": { + "base_classes": [ + "Message" + ], + "beta": false, + "conditional_paths": [], + "custom_fields": {}, + "description": "Extracts text using a template.", + "display_name": "Parser", + "documentation": "https://docs.langflow.org/components-processing#parser", + "edited": false, + "field_order": [ + "input_data", + "mode", + "pattern", + "sep" + ], + "frozen": false, + "icon": "braces", + "legacy": false, + "metadata": { + "code_hash": "556209520650", + "dependencies": { + "dependencies": [ + { + "name": "langflow", + "version": "0.5.0.post2" + } + ], + "total_dependencies": 1 + }, + "module": "langflow.components.processing.parser.ParserComponent" + }, + "minimized": false, + "output_types": [], + "outputs": [ + { + "allows_loop": false, + "cache": true, + "display_name": "Parsed Text", + "group_outputs": false, + "method": "parse_combined_text", + "name": "parsed_text", + "selected": "Message", + "tool_mode": true, + "types": [ + "Message" + ], + "value": "__UNDEFINED__" + } + ], + "pinned": false, + "template": { + "_type": "Component", + "code": { + "advanced": true, + "dynamic": true, + "fileTypes": [], + "file_path": "", + "info": "", + "list": false, + "load_from_db": false, + "multiline": true, + "name": "code", + "password": false, + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "type": "code", + "value": "from langflow.custom.custom_component.component import Component\nfrom langflow.helpers.data import safe_convert\nfrom langflow.inputs.inputs import BoolInput, HandleInput, MessageTextInput, MultilineInput, TabInput\nfrom langflow.schema.data import Data\nfrom langflow.schema.dataframe import DataFrame\nfrom langflow.schema.message import Message\nfrom langflow.template.field.base import Output\n\n\nclass ParserComponent(Component):\n display_name = \"Parser\"\n description = \"Extracts text using a template.\"\n documentation: str = \"https://docs.langflow.org/components-processing#parser\"\n icon = \"braces\"\n\n inputs = [\n HandleInput(\n name=\"input_data\",\n display_name=\"Data or DataFrame\",\n input_types=[\"DataFrame\", \"Data\"],\n info=\"Accepts either a DataFrame or a Data object.\",\n required=True,\n ),\n TabInput(\n name=\"mode\",\n display_name=\"Mode\",\n options=[\"Parser\", \"Stringify\"],\n value=\"Parser\",\n info=\"Convert into raw string instead of using a template.\",\n real_time_refresh=True,\n ),\n MultilineInput(\n 
name=\"pattern\",\n display_name=\"Template\",\n info=(\n \"Use variables within curly brackets to extract column values for DataFrames \"\n \"or key values for Data.\"\n \"For example: `Name: {Name}, Age: {Age}, Country: {Country}`\"\n ),\n value=\"Text: {text}\", # Example default\n dynamic=True,\n show=True,\n required=True,\n ),\n MessageTextInput(\n name=\"sep\",\n display_name=\"Separator\",\n advanced=True,\n value=\"\\n\",\n info=\"String used to separate rows/items.\",\n ),\n ]\n\n outputs = [\n Output(\n display_name=\"Parsed Text\",\n name=\"parsed_text\",\n info=\"Formatted text output.\",\n method=\"parse_combined_text\",\n ),\n ]\n\n def update_build_config(self, build_config, field_value, field_name=None):\n \"\"\"Dynamically hide/show `template` and enforce requirement based on `stringify`.\"\"\"\n if field_name == \"mode\":\n build_config[\"pattern\"][\"show\"] = self.mode == \"Parser\"\n build_config[\"pattern\"][\"required\"] = self.mode == \"Parser\"\n if field_value:\n clean_data = BoolInput(\n name=\"clean_data\",\n display_name=\"Clean Data\",\n info=(\n \"Enable to clean the data by removing empty rows and lines \"\n \"in each cell of the DataFrame/ Data object.\"\n ),\n value=True,\n advanced=True,\n required=False,\n )\n build_config[\"clean_data\"] = clean_data.to_dict()\n else:\n build_config.pop(\"clean_data\", None)\n\n return build_config\n\n def _clean_args(self):\n \"\"\"Prepare arguments based on input type.\"\"\"\n input_data = self.input_data\n\n match input_data:\n case list() if all(isinstance(item, Data) for item in input_data):\n msg = \"List of Data objects is not supported.\"\n raise ValueError(msg)\n case DataFrame():\n return input_data, None\n case Data():\n return None, input_data\n case dict() if \"data\" in input_data:\n try:\n if \"columns\" in input_data: # Likely a DataFrame\n return DataFrame.from_dict(input_data), None\n # Likely a Data object\n return None, Data(**input_data)\n except (TypeError, ValueError, KeyError) as e:\n msg = f\"Invalid structured input provided: {e!s}\"\n raise ValueError(msg) from e\n case _:\n msg = f\"Unsupported input type: {type(input_data)}. 
Expected DataFrame or Data.\"\n raise ValueError(msg)\n\n def parse_combined_text(self) -> Message:\n \"\"\"Parse all rows/items into a single text or convert input to string if `stringify` is enabled.\"\"\"\n # Early return for stringify option\n if self.mode == \"Stringify\":\n return self.convert_to_string()\n\n df, data = self._clean_args()\n\n lines = []\n if df is not None:\n for _, row in df.iterrows():\n formatted_text = self.pattern.format(**row.to_dict())\n lines.append(formatted_text)\n elif data is not None:\n formatted_text = self.pattern.format(**data.data)\n lines.append(formatted_text)\n\n combined_text = self.sep.join(lines)\n self.status = combined_text\n return Message(text=combined_text)\n\n def convert_to_string(self) -> Message:\n \"\"\"Convert input data to string with proper error handling.\"\"\"\n result = \"\"\n if isinstance(self.input_data, list):\n result = \"\\n\".join([safe_convert(item, clean_data=self.clean_data or False) for item in self.input_data])\n else:\n result = safe_convert(self.input_data or False)\n self.log(f\"Converted to string with length: {len(result)}\")\n\n message = Message(text=result)\n self.status = message\n return message\n" + }, + "input_data": { + "_input_type": "HandleInput", + "advanced": false, + "display_name": "Data or DataFrame", + "dynamic": false, + "info": "Accepts either a DataFrame or a Data object.", + "input_types": [ + "DataFrame", + "Data" + ], + "list": false, + "list_add_label": "Add More", + "name": "input_data", + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "trace_as_metadata": true, + "type": "other", + "value": "" + }, + "mode": { + "_input_type": "TabInput", + "advanced": false, + "display_name": "Mode", + "dynamic": false, + "info": "Convert into raw string instead of using a template.", + "name": "mode", + "options": [ + "Parser", + "Stringify" + ], + "placeholder": "", + "real_time_refresh": true, + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_metadata": true, + "type": "tab", + "value": "Parser" + }, + "pattern": { + "_input_type": "MultilineInput", + "advanced": false, + "copy_field": false, + "display_name": "Template", + "dynamic": true, + "info": "Use variables within curly brackets to extract column values for DataFrames or key values for Data.For example: `Name: {Name}, Age: {Age}, Country: {Country}`", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "multiline": true, + "name": "pattern", + "placeholder": "", + "required": true, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "{text}" + }, + "sep": { + "_input_type": "MessageTextInput", + "advanced": true, + "display_name": "Separator", + "dynamic": false, + "info": "String used to separate rows/items.", + "input_types": [ + "Message" + ], + "list": false, + "list_add_label": "Add More", + "load_from_db": false, + "name": "sep", + "placeholder": "", + "required": false, + "show": true, + "title_case": false, + "tool_mode": false, + "trace_as_input": true, + "trace_as_metadata": true, + "type": "str", + "value": "\n" + } + }, + "tool_mode": false + }, + "showNode": true, + "type": "ParserComponent" + }, + "dragging": false, + "id": "ParserComponent-tZs7s", + "measured": { + "height": 329, + "width": 320 + }, + "position": { + "x": 1282.0613788430787, + "y": 564.2200355777322 + }, + "selected": false, + "type": 
"genericNode" + } + ], + "viewport": { + "x": -187.81970119617426, + "y": 80.66031410799627, + "zoom": 0.5607296737841687 + } + }, + "description": "OpenRAG Open Search Nudges generator, based on the Open Search documents and the chat history.", + "endpoint_name": null, + "id": "ebc01d31-1976-46ce-a385-b0240327226c", + "is_component": false, + "last_tested_version": "1.5.0.post2", + "name": "OpenRAG Open Search Nudges", + "tags": [ + "assistants", + "agents" + ] +} \ No newline at end of file diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 91561b43..da0fb436 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -24,10 +24,12 @@ "@radix-ui/react-switch": "^1.2.5", "@tailwindcss/forms": "^0.5.10", "@tailwindcss/typography": "^0.5.16", + "@tanstack/react-query": "^5.86.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", "lucide-react": "^0.525.0", + "motion": "^12.23.12", "next": "15.3.5", "next-themes": "^0.4.6", "react": "^19.0.0", @@ -2388,6 +2390,32 @@ "node": ">=4" } }, + "node_modules/@tanstack/query-core": { + "version": "5.86.0", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.86.0.tgz", + "integrity": "sha512-Y6ibQm6BXbw6w1p3a5LrPn8Ae64M0dx7hGmnhrm9P+XAkCCKXOwZN0J5Z1wK/0RdNHtR9o+sWHDXd4veNI60tQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.86.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.86.0.tgz", + "integrity": "sha512-jgS/v0oSJkGHucv9zxOS8rL7mjATh1XO3K4eqAV4WMpAly8okcBrGi1YxRZN5S4B59F54x9JFjWrK5vMAvJYqA==", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.86.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, "node_modules/@tybys/wasm-util": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.0.tgz", @@ -4744,6 +4772,33 @@ "url": "https://github.com/sponsors/rawify" } }, + "node_modules/framer-motion": { + "version": "12.23.12", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.23.12.tgz", + "integrity": "sha512-6e78rdVtnBvlEVgu6eFEAgG9v3wLnYEboM8I5O5EXvfKC8gxGQB8wXJdhkMy10iVcn05jl6CNw7/HTsTCfwcWg==", + "license": "MIT", + "dependencies": { + "motion-dom": "^12.23.12", + "motion-utils": "^12.23.6", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -5903,6 +5958,47 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/motion": { + "version": "12.23.12", + "resolved": "https://registry.npmjs.org/motion/-/motion-12.23.12.tgz", + "integrity": "sha512-8jCD8uW5GD1csOoqh1WhH1A6j5APHVE15nuBkFeRiMzYBdRwyAHmSP/oXSuW0WJPZRXTFdBoG4hY9TFWNhhwng==", + "license": "MIT", + "dependencies": { + "framer-motion": "^12.23.12", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": 
true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + },
+ "node_modules/motion-dom": { + "version": "12.23.12", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.23.12.tgz", + "integrity": "sha512-RcR4fvMCTESQBD/uKQe49D5RUeDOokkGRmz4ceaJKDBgHYtZtntC/s2vLvY38gqGaytinij/yi3hMcWVcEF5Kw==", + "license": "MIT", + "dependencies": { + "motion-utils": "^12.23.6" + } + },
+ "node_modules/motion-utils": { + "version": "12.23.6", + "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.23.6.tgz", + "integrity": "sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==", + "license": "MIT" + },
"node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
diff --git a/frontend/package.json b/frontend/package.json index a7442d19..ac02091b 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -25,10 +25,12 @@ "@radix-ui/react-switch": "^1.2.5", "@tailwindcss/forms": "^0.5.10", "@tailwindcss/typography": "^0.5.16", + "@tanstack/react-query": "^5.86.0", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", "lucide-react": "^0.525.0", + "motion": "^12.23.12", "next": "15.3.5", "next-themes": "^0.4.6", "react": "^19.0.0",
diff --git a/frontend/src/app/api/get-query-client.ts b/frontend/src/app/api/get-query-client.ts new file mode 100644 index 00000000..4602496c --- /dev/null +++ b/frontend/src/app/api/get-query-client.ts @@ -0,0 +1,37 @@
+import { + QueryClient, + defaultShouldDehydrateQuery, + isServer, +} from "@tanstack/react-query"; +
+function makeQueryClient() { + return new QueryClient({ + defaultOptions: { + queries: { + staleTime: 60 * 1000, + }, + dehydrate: { + // include pending queries in dehydration + shouldDehydrateQuery: (query) => + defaultShouldDehydrateQuery(query) || + query.state.status === "pending", + }, + }, + }); +} +
+let browserQueryClient: QueryClient | undefined = undefined; +
+export function getQueryClient() { + if (isServer) { + // Server: always make a new query client + return makeQueryClient(); + } else { + // Browser: make a new query client if we don't already have one + // This is very important, so we don't re-make a new client if React + // suspends during the initial render. This may not be needed if we + // have a suspense boundary BELOW the creation of the query client + if (!browserQueryClient) browserQueryClient = makeQueryClient(); + return browserQueryClient; + } +}
diff --git a/frontend/src/app/api/queries/useGetNudgesQuery.ts b/frontend/src/app/api/queries/useGetNudgesQuery.ts new file mode 100644 index 00000000..38cc9502 --- /dev/null +++ b/frontend/src/app/api/queries/useGetNudgesQuery.ts @@ -0,0 +1,46 @@
+import { + useQuery, + useQueryClient, + UseQueryOptions, +} from "@tanstack/react-query"; +
+type Nudge = string; +
+const DEFAULT_NUDGES = [ + "Show me this quarter's top 10 deals", + "Summarize recent client interactions", + "Search OpenSearch for mentions of our competitors", +]; +
+export const useGetNudgesQuery = ( + chatId?: string | null, + options?: Omit<UseQueryOptions<Nudge[]>, "queryKey" | "queryFn">, +) => { + const queryClient = useQueryClient(); +
+ // Drop the cached nudges for this chat id so the next mount refetches.
+ function cancel() { + queryClient.removeQueries({ queryKey: ["nudges", chatId] }); + } +
+ async function getNudges(): Promise<Nudge[]> { + try { + const response = await fetch(`/api/nudges${chatId ? `/${chatId}` : ""}`);
+ const data = await response.json();
+ // A blank response means the flow could not produce nudges; an empty
+ // array is truthy, so fall back based on length rather than ||.
+ const nudges = (data.response ?? "").split("\n").filter(Boolean);
+ return nudges.length > 0 ? nudges : DEFAULT_NUDGES;
+ } catch (error) { + console.error("Error getting nudges", error); + return DEFAULT_NUDGES; + } + } +
+ const queryResult = useQuery( + { + queryKey: ["nudges", chatId], + queryFn: getNudges, + ...options, + }, + queryClient, + ); +
+ return { ...queryResult, cancel }; +};
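A minimal consumption sketch for the hook above (illustrative only; the real wiring lives in chat/page.tsx further down, which passes previousResponseIds[endpoint] and aliases cancel as cancelNudges):

// Hypothetical client component using useGetNudgesQuery.
const { data: nudges = [], cancel } = useGetNudgesQuery(chatId);
// `nudges` is a string[] that falls back to DEFAULT_NUDGES on any fetch or
// parse failure; `cancel()` removes the cached ["nudges", chatId] entry so
// the suggestions are refetched the next time the query mounts.
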
diff --git a/frontend/src/app/chat/nudges.tsx b/frontend/src/app/chat/nudges.tsx new file mode 100644 index 00000000..c5929924 --- /dev/null +++ b/frontend/src/app/chat/nudges.tsx @@ -0,0 +1,46 @@
+import { motion, AnimatePresence } from "motion/react";
+
+// NOTE: the original JSX of this new file did not survive extraction; the
+// render body below is a minimal reconstruction. The structure (render only
+// when non-empty, a horizontal chip list, a right-edge fade) follows the
+// surviving fragments, but the animation props and classNames are illustrative.
+export default function Nudges({
+  nudges,
+  handleSuggestionClick,
+}: {
+  nudges: string[];
+  handleSuggestionClick: (suggestion: string) => void;
+}) {
+  return (
+    <AnimatePresence>
+      {nudges.length > 0 && (
+        <motion.div
+          initial={{ opacity: 0, y: 8 }}
+          animate={{ opacity: 1, y: 0 }}
+          exit={{ opacity: 0, y: 8 }}
+        >
+          <div className="relative">
+            <div className="flex gap-2 overflow-x-auto">
+              {nudges.map((suggestion: string, index: number) => (
+                <button
+                  key={index}
+                  type="button"
+                  onClick={() => handleSuggestionClick(suggestion)}
+                >
+                  {suggestion}
+                </button>
+              ))}
+            </div>
+            {/* Fade out gradient on the right */}
+            <div className="pointer-events-none absolute inset-y-0 right-0 w-12 bg-gradient-to-l from-background to-transparent" />
+          </div>
+        </motion.div>
+      )}
+    </AnimatePresence>
+  );
+}
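The nudges flow is prompted to return the three suggestions as a single newline-separated string and to return blank on failure, which is why useGetNudgesQuery splits on "\n" and falls back to the defaults when the result is empty. A hedged sketch of that contract (the payload shape is inferred from the hook; parseNudges itself is illustrative and not part of this diff):

// Assumed shape of GET /api/nudges[/:chatId]:
//   { "response": "nudge one\nnudge two\nnudge three" }
function parseNudges(payload: { response?: string }, fallback: string[]): string[] {
  const lines = (payload.response ?? "")
    .split("\n")
    .map((line) => line.trim())
    .filter(Boolean); // a blank response means the flow could not help
  return lines.length > 0 ? lines : fallback;
}
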
diff --git a/frontend/src/app/chat/page.tsx b/frontend/src/app/chat/page.tsx index c3fb39b0..8b3b48df 100644 --- a/frontend/src/app/chat/page.tsx +++ b/frontend/src/app/chat/page.tsx @@ -1,12 +1,5 @@ "use client"; -import { ProtectedRoute } from "@/components/protected-route"; -import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; -import { Button } from "@/components/ui/button"; -import { useAuth } from "@/contexts/auth-context"; -import { EndpointType, useChat } from "@/contexts/chat-context"; -import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context"; -import { useTask } from "@/contexts/task-context"; import { AtSign, Bot, @@ -22,6 +15,15 @@ import { Zap, } from "lucide-react"; import { useEffect, useRef, useState } from "react"; +import { ProtectedRoute } from "@/components/protected-route"; +import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; +import { Button } from "@/components/ui/button"; +import { useAuth } from "@/contexts/auth-context"; +import { type EndpointType, useChat } from "@/contexts/chat-context"; +import { useKnowledgeFilter } from "@/contexts/knowledge-filter-context"; +import { useTask } from "@/contexts/task-context"; +import { useGetNudgesQuery } from "../api/queries/useGetNudgesQuery"; +import Nudges from "./nudges"; interface Message { role: "user" | "assistant"; @@ -190,7 +192,7 @@ function ChatPage() { "Upload failed with status:", response.status, "Response:", - errorText + errorText, ); throw new Error("Failed to process document"); } @@ -243,7 +245,7 @@ function ChatPage() { ...prev, [endpoint]: result.response_id, })); - + // If this is a new conversation (no currentConversationId), set it now if (!currentConversationId) { setCurrentConversationId(result.response_id); } @@ -435,8 +437,8 @@ function ChatPage() { // 2. It's different from the last loaded conversation AND // 3. 
User is not in the middle of an interaction if ( - conversationData && - conversationData.messages && + conversationData && + conversationData.messages && lastLoadedConversationRef.current !== conversationData.response_id && !isUserInteracting && !isForkingInProgress @@ -444,7 +446,7 @@ console.log( "Loading conversation with", conversationData.messages.length, - "messages" + "messages", ); // Convert backend message format to frontend Message interface const convertedMessages: Message[] = conversationData.messages.map( @@ -454,8 +456,21 @@ (msg: { role: string; content: string; timestamp?: string; response_id?: string; chunks?: Array<{ - item?: { type?: string; tool_name?: string; id?: string; inputs?: unknown; results?: unknown; status?: string }; - delta?: { tool_calls?: Array<{ id?: string; function?: { name?: string; arguments?: string }; type?: string }> }; + item?: { + type?: string; + tool_name?: string; + id?: string; + inputs?: unknown; + results?: unknown; + status?: string; + }; + delta?: { + tool_calls?: Array<{ + id?: string; + function?: { name?: string; arguments?: string }; + type?: string; + }>; + }; type?: string; result?: unknown; output?: unknown; @@ -488,10 +503,15 @@ functionCalls.push({ id: toolCall.id || "", name: toolCall.tool_name || "unknown", - arguments: (toolCall.inputs as Record<string, unknown>) || {}, + arguments: + (toolCall.inputs as Record<string, unknown>) || {}, argumentsString: JSON.stringify(toolCall.inputs || {}), - result: toolCall.results as Record<string, unknown> | ToolCallResult[], - status: (toolCall.status as "pending" | "completed" | "error") || "completed", + result: toolCall.results as + | Record<string, unknown> + | ToolCallResult[], + status: + (toolCall.status as "pending" | "completed" | "error") || + "completed", type: "tool_call", }); } @@ -502,7 +522,9 @@ functionCalls.push({ id: toolCall.id || "", name: toolCall.function.name || "unknown", - arguments: toolCall.function.arguments ? JSON.parse(toolCall.function.arguments) : {}, + arguments: toolCall.function.arguments + ? JSON.parse(toolCall.function.arguments) + : {}, argumentsString: toolCall.function.arguments || "", status: "completed", type: toolCall.type || "function", @@ -511,10 +533,17 @@ } } // Process tool call results from chunks - if (chunk.type === "response.tool_call.result" || chunk.type === "tool_call_result") { + if ( + chunk.type === "response.tool_call.result" || + chunk.type === "tool_call_result" + ) { const lastCall = functionCalls[functionCalls.length - 1]; if (lastCall) { - lastCall.result = (chunk.result as Record<string, unknown> | ToolCallResult[]) || (chunk as Record<string, unknown>); + lastCall.result = + (chunk.result as + | Record<string, unknown> + | ToolCallResult[]) || + (chunk as Record<string, unknown>); lastCall.status = "completed"; } } @@ -522,19 +551,31 @@ } // Process response_data (non-streaming data) - if (msg.response_data && typeof msg.response_data === 'object') { + if (msg.response_data && typeof msg.response_data === "object") { // Look for tool_calls in various places in the response data - const responseData = typeof msg.response_data === 'string' ? JSON.parse(msg.response_data) : msg.response_data; - - if (responseData.tool_calls && Array.isArray(responseData.tool_calls)) { + const responseData = + typeof msg.response_data === "string" + ? 
JSON.parse(msg.response_data) + : msg.response_data; + + if ( + responseData.tool_calls && + Array.isArray(responseData.tool_calls) + ) { for (const toolCall of responseData.tool_calls) { functionCalls.push({ id: toolCall.id, name: toolCall.function?.name || toolCall.name, - arguments: toolCall.function?.arguments || toolCall.arguments, - argumentsString: typeof (toolCall.function?.arguments || toolCall.arguments) === 'string' - ? toolCall.function?.arguments || toolCall.arguments - : JSON.stringify(toolCall.function?.arguments || toolCall.arguments), + arguments: + toolCall.function?.arguments || toolCall.arguments, + argumentsString: + typeof ( + toolCall.function?.arguments || toolCall.arguments + ) === "string" + ? toolCall.function?.arguments || toolCall.arguments + : JSON.stringify( + toolCall.function?.arguments || toolCall.arguments, + ), result: toolCall.result, status: "completed", type: toolCall.type || "function", @@ -552,7 +593,7 @@ function ChatPage() { } return message; - } + }, ); setMessages(convertedMessages); @@ -641,7 +682,7 @@ function ChatPage() { console.log( "Chat page received file upload error event:", filename, - error + error, ); // Replace the last message with error message @@ -655,37 +696,37 @@ function ChatPage() { window.addEventListener( "fileUploadStart", - handleFileUploadStart as EventListener + handleFileUploadStart as EventListener, ); window.addEventListener( "fileUploaded", - handleFileUploaded as EventListener + handleFileUploaded as EventListener, ); window.addEventListener( "fileUploadComplete", - handleFileUploadComplete as EventListener + handleFileUploadComplete as EventListener, ); window.addEventListener( "fileUploadError", - handleFileUploadError as EventListener + handleFileUploadError as EventListener, ); return () => { window.removeEventListener( "fileUploadStart", - handleFileUploadStart as EventListener + handleFileUploadStart as EventListener, ); window.removeEventListener( "fileUploaded", - handleFileUploaded as EventListener + handleFileUploaded as EventListener, ); window.removeEventListener( "fileUploadComplete", - handleFileUploadComplete as EventListener + handleFileUploadComplete as EventListener, ); window.removeEventListener( "fileUploadError", - handleFileUploadError as EventListener + handleFileUploadError as EventListener, ); }; }, [endpoint, setPreviousResponseIds]); @@ -711,6 +752,10 @@ function ChatPage() { }; }, [isFilterDropdownOpen]); + const { data: nudges = [], cancel: cancelNudges } = useGetNudgesQuery( + previousResponseIds[endpoint], + ); + const handleSSEStream = async (userMessage: Message) => { const apiEndpoint = endpoint === "chat" ? 
"/api/chat" : "/api/langflow"; @@ -813,7 +858,7 @@ function ChatPage() { console.log( "Received chunk:", chunk.type || chunk.object, - chunk + chunk, ); // Extract response ID if present @@ -829,14 +874,14 @@ function ChatPage() { if (chunk.delta.function_call) { console.log( "Function call in delta:", - chunk.delta.function_call + chunk.delta.function_call, ); // Check if this is a new function call if (chunk.delta.function_call.name) { console.log( "New function call:", - chunk.delta.function_call.name + chunk.delta.function_call.name, ); const functionCall: FunctionCall = { name: chunk.delta.function_call.name, @@ -852,7 +897,7 @@ function ChatPage() { else if (chunk.delta.function_call.arguments) { console.log( "Function call arguments delta:", - chunk.delta.function_call.arguments + chunk.delta.function_call.arguments, ); const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]; @@ -864,14 +909,14 @@ function ChatPage() { chunk.delta.function_call.arguments; console.log( "Accumulated arguments:", - lastFunctionCall.argumentsString + lastFunctionCall.argumentsString, ); // Try to parse arguments if they look complete if (lastFunctionCall.argumentsString.includes("}")) { try { const parsed = JSON.parse( - lastFunctionCall.argumentsString + lastFunctionCall.argumentsString, ); lastFunctionCall.arguments = parsed; lastFunctionCall.status = "completed"; @@ -879,7 +924,7 @@ function ChatPage() { } catch (e) { console.log( "Arguments not yet complete or invalid JSON:", - e + e, ); } } @@ -912,7 +957,7 @@ function ChatPage() { else if (toolCall.function.arguments) { console.log( "Tool call arguments delta:", - toolCall.function.arguments + toolCall.function.arguments, ); const lastFunctionCall = currentFunctionCalls[ @@ -926,7 +971,7 @@ function ChatPage() { toolCall.function.arguments; console.log( "Accumulated tool arguments:", - lastFunctionCall.argumentsString + lastFunctionCall.argumentsString, ); // Try to parse arguments if they look complete @@ -935,7 +980,7 @@ function ChatPage() { ) { try { const parsed = JSON.parse( - lastFunctionCall.argumentsString + lastFunctionCall.argumentsString, ); lastFunctionCall.arguments = parsed; lastFunctionCall.status = "completed"; @@ -943,7 +988,7 @@ function ChatPage() { } catch (e) { console.log( "Tool arguments not yet complete or invalid JSON:", - e + e, ); } } @@ -975,7 +1020,7 @@ function ChatPage() { console.log( "Error parsing function call on finish:", fc, - e + e, ); } } @@ -991,12 +1036,12 @@ function ChatPage() { console.log( "🟢 CREATING function call (added):", chunk.item.id, - chunk.item.tool_name || chunk.item.name + chunk.item.tool_name || chunk.item.name, ); // Try to find an existing pending call to update (created by earlier deltas) let existing = currentFunctionCalls.find( - (fc) => fc.id === chunk.item.id + (fc) => fc.id === chunk.item.id, ); if (!existing) { existing = [...currentFunctionCalls] @@ -1005,7 +1050,7 @@ function ChatPage() { (fc) => fc.status === "pending" && !fc.id && - fc.name === (chunk.item.tool_name || chunk.item.name) + fc.name === (chunk.item.tool_name || chunk.item.name), ); } @@ -1018,7 +1063,7 @@ function ChatPage() { chunk.item.inputs || existing.arguments; console.log( "🟢 UPDATED existing pending function call with id:", - existing.id + existing.id, ); } else { const functionCall: FunctionCall = { @@ -1036,7 +1081,7 @@ function ChatPage() { currentFunctionCalls.map((fc) => ({ id: fc.id, name: fc.name, - })) + })), ); } } @@ -1047,7 +1092,7 @@ function ChatPage() { ) { console.log( 
"Function args delta (Realtime API):", - chunk.delta + chunk.delta, ); const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]; @@ -1058,7 +1103,7 @@ function ChatPage() { lastFunctionCall.argumentsString += chunk.delta || ""; console.log( "Accumulated arguments (Realtime API):", - lastFunctionCall.argumentsString + lastFunctionCall.argumentsString, ); } } @@ -1069,26 +1114,26 @@ function ChatPage() { ) { console.log( "Function args done (Realtime API):", - chunk.arguments + chunk.arguments, ); const lastFunctionCall = currentFunctionCalls[currentFunctionCalls.length - 1]; if (lastFunctionCall) { try { lastFunctionCall.arguments = JSON.parse( - chunk.arguments || "{}" + chunk.arguments || "{}", ); lastFunctionCall.status = "completed"; console.log( "Parsed function arguments (Realtime API):", - lastFunctionCall.arguments + lastFunctionCall.arguments, ); } catch (e) { lastFunctionCall.arguments = { raw: chunk.arguments }; lastFunctionCall.status = "error"; console.log( "Error parsing function arguments (Realtime API):", - e + e, ); } } @@ -1102,14 +1147,14 @@ function ChatPage() { console.log( "🔵 UPDATING function call (done):", chunk.item.id, - chunk.item.tool_name || chunk.item.name + chunk.item.tool_name || chunk.item.name, ); console.log( "🔵 Looking for existing function calls:", currentFunctionCalls.map((fc) => ({ id: fc.id, name: fc.name, - })) + })), ); // Find existing function call by ID or name @@ -1117,14 +1162,14 @@ function ChatPage() { (fc) => fc.id === chunk.item.id || fc.name === chunk.item.tool_name || - fc.name === chunk.item.name + fc.name === chunk.item.name, ); if (functionCall) { console.log( "🔵 FOUND existing function call, updating:", functionCall.id, - functionCall.name + functionCall.name, ); // Update existing function call with completion data functionCall.status = @@ -1147,7 +1192,7 @@ function ChatPage() { "🔴 WARNING: Could not find existing function call to update:", chunk.item.id, chunk.item.tool_name, - chunk.item.name + chunk.item.name, ); } } @@ -1168,7 +1213,7 @@ function ChatPage() { fc.name === chunk.item.name || fc.name === chunk.item.type || fc.name.includes(chunk.item.type.replace("_call", "")) || - chunk.item.type.includes(fc.name) + chunk.item.type.includes(fc.name), ); if (functionCall) { @@ -1212,12 +1257,12 @@ function ChatPage() { "🟡 CREATING tool call (added):", chunk.item.id, chunk.item.tool_name || chunk.item.name, - chunk.item.type + chunk.item.type, ); // Dedupe by id or pending with same name let existing = currentFunctionCalls.find( - (fc) => fc.id === chunk.item.id + (fc) => fc.id === chunk.item.id, ); if (!existing) { existing = [...currentFunctionCalls] @@ -1229,7 +1274,7 @@ function ChatPage() { fc.name === (chunk.item.tool_name || chunk.item.name || - chunk.item.type) + chunk.item.type), ); } @@ -1245,7 +1290,7 @@ function ChatPage() { chunk.item.inputs || existing.arguments; console.log( "🟡 UPDATED existing pending tool call with id:", - existing.id + existing.id, ); } else { const functionCall = { @@ -1266,7 +1311,7 @@ function ChatPage() { id: fc.id, name: fc.name, type: fc.type, - })) + })), ); } } @@ -1362,6 +1407,9 @@ function ChatPage() { if (!controller.signal.aborted && thisStreamId === streamIdRef.current) { setMessages((prev) => [...prev, finalMessage]); setStreamingMessage(null); + if (previousResponseIds[endpoint]) { + cancelNudges(); + } } // Store the response ID for the next request for this endpoint @@ -1374,7 +1422,7 @@ function ChatPage() { ...prev, [endpoint]: newResponseId, })); - + // If 
this is a new conversation (no currentConversationId), set it now if (!currentConversationId) { setCurrentConversationId(newResponseId); @@ -1402,13 +1450,12 @@ function ChatPage() { } }; - const handleSubmit = async (e: React.FormEvent) => { - e.preventDefault(); - if (!input.trim() || loading) return; + const handleSendMessage = async (inputMessage: string) => { + if (!inputMessage.trim() || loading) return; const userMessage: Message = { role: "user", - content: input.trim(), + content: inputMessage.trim(), timestamp: new Date(), }; @@ -1479,6 +1526,9 @@ function ChatPage() { timestamp: new Date(), }; setMessages((prev) => [...prev, assistantMessage]); + if (result.response_id) { + cancelNudges(); + } // Store the response ID if present for this endpoint if (result.response_id) { @@ -1486,7 +1536,7 @@ function ChatPage() { ...prev, [endpoint]: result.response_id, })); - + // If this is a new conversation (no currentConversationId), set it now if (!currentConversationId) { setCurrentConversationId(result.response_id); @@ -1520,6 +1570,11 @@ function ChatPage() { setLoading(false); }; + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + handleSendMessage(input); + }; + const toggleFunctionCall = (functionCallId: string) => { setExpandedFunctionCalls((prev) => { const newSet = new Set(prev); @@ -1534,7 +1589,7 @@ function ChatPage() { const handleForkConversation = ( messageIndex: number, - event?: React.MouseEvent + event?: React.MouseEvent, ) => { // Prevent any default behavior and stop event propagation if (event) { @@ -1599,7 +1654,7 @@ function ChatPage() { const renderFunctionCalls = ( functionCalls: FunctionCall[], - messageIndex?: number + messageIndex?: number, ) => { if (!functionCalls || functionCalls.length === 0) return null; @@ -1828,15 +1883,8 @@ function ChatPage() { ); }; - const suggestionChips = [ - "Show me this quarter's top 10 deals", - "Summarize recent client interactions", - "Search OpenSearch for mentions of our competitors", - ]; - const handleSuggestionClick = (suggestion: string) => { - setInput(suggestion); - inputRef.current?.focus(); + handleSendMessage(suggestion); }; return ( @@ -1974,7 +2022,7 @@ function ChatPage() {
{renderFunctionCalls( message.functionCalls || [], - index + index, )}

{message.content} @@ -2005,7 +2053,7 @@ function ChatPage() {

{renderFunctionCalls( streamingMessage.functionCalls, - messages.length + messages.length, )}

{streamingMessage.content} @@ -2052,27 +2100,14 @@ function ChatPage() { {/* Suggestion chips - always show unless streaming */} {!streamingMessage && ( -

-
-
- {suggestionChips.map((suggestion, index) => ( - - ))} -
- {/* Fade out gradient on the right */} -
-
-
+ <Nudges nudges={nudges} handleSuggestionClick={handleSuggestionClick} /> + )} {/* Input Area - Fixed at bottom */} -
+
@@ -2161,7 +2196,7 @@ function ChatPage() { const filteredFilters = availableFilters.filter((filter) => filter.name .toLowerCase() - .includes(filterSearchTerm.toLowerCase()) + .includes(filterSearchTerm.toLowerCase()), ); if (e.key === "Escape") { @@ -2179,7 +2214,7 @@ function ChatPage() { if (e.key === "ArrowDown") { e.preventDefault(); setSelectedFilterIndex((prev) => - prev < filteredFilters.length - 1 ? prev + 1 : 0 + prev < filteredFilters.length - 1 ? prev + 1 : 0, ); return; } @@ -2187,7 +2222,7 @@ function ChatPage() { if (e.key === "ArrowUp") { e.preventDefault(); setSelectedFilterIndex((prev) => - prev > 0 ? prev - 1 : filteredFilters.length - 1 + prev > 0 ? prev - 1 : filteredFilters.length - 1, ); return; } @@ -2205,7 +2240,7 @@ function ChatPage() { ) { e.preventDefault(); handleFilterSelect( - filteredFilters[selectedFilterIndex] + filteredFilters[selectedFilterIndex], ); return; } @@ -2224,7 +2259,7 @@ function ChatPage() { ) { e.preventDefault(); handleFilterSelect( - filteredFilters[selectedFilterIndex] + filteredFilters[selectedFilterIndex], ); return; } @@ -2304,7 +2339,7 @@ function ChatPage() { .filter((filter) => filter.name .toLowerCase() - .includes(filterSearchTerm.toLowerCase()) + .includes(filterSearchTerm.toLowerCase()), ) .map((filter, index) => (