fix: disable upload message when ingesting on onboarding, wait for the file to be ingested, and add knowledge filters on nudges (#345)
* Removed the upload start message
* Made the onboarding upload refetch nudges and only finish when the document is ingested
* Implemented query filters on nudges
* Changed the nudges endpoints from GET to POST
* Implemented filtering on nudges for documents that are not sample data

---------

Co-authored-by: Sebastián Estévez <estevezsebastian@gmail.com>
parent d97c41bd7f
commit a5d25e0c0b
9 changed files with 447 additions and 133 deletions
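With this change the nudges endpoints take a POST body carrying optional knowledge filters, a limit, and a score threshold (see the endpoint, hook, and service diffs below). A minimal round-trip sketch, assuming a locally running OpenRAG instance; the base URL and filter values are illustrative, not part of this change:

import requests

BASE_URL = "http://localhost:3000"  # hypothetical deployment URL
payload = {
    "filters": {"data_sources": ["handbook.pdf"]},  # illustrative; every key is optional
    "limit": 3,
    "score_threshold": 0.0,
}
# Same path the frontend hook calls; POST replaces the old GET.
resp = requests.post(f"{BASE_URL}/api/nudges", json=payload)
# The response carries a newline-separated string of nudges in its "response" field,
# which is how useGetNudgesQuery (below) parses it.
nudges = [line for line in resp.json().get("response", "").split("\n") if line]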
OpenRAG OpenSearch Nudges flow (exported Langflow JSON, id ebc01d31-1976-46ce-a385-b0240327226c, description "OpenRAG OpenSearch Nudges generator, based on the OpenSearch documents and the chat history"). Changes in this file:

* Node ids are regenerated throughout: ChatInput-bqH7H → ChatInput-kYraY, ChatOutput-BMVN5 → ChatOutput-S4nKr, OpenSearch-iYfjf → OpenSearchVectorStoreComponent-JA880, EmbeddingModel-eZ6bT → EmbeddingModel-rofSg, Prompt Template-Wo6kR → Prompt Template-OoRfU, ParserComponent-tZs7s → ParserComponent-uMBcK, LanguageModelComponent-NSTA6 → LanguageModelComponent-dxtYP. The existing edge ids and source/target handle strings change accordingly, moving from the "xy-edge__…" form to the "reactflow__edge-…" form.
* A Text Input node (TextInput-OcjB6, display name "Text Input", lf_version 1.6.0) is added. Its input_value is a SecretStrInput with load_from_db: true and value "OPENRAG-QUERY-FILTER", and a new edge connects its "text" output to the "filter_expression" input of OpenSearchVectorStoreComponent-JA880.
* The Language Model component's model value changes from "gpt-4o-mini" to "gpt-4o"; the EmbeddingModel and LanguageModelComponent last_updated timestamps move from 2025-10-01T20:14:43.010Z to 2025-11-03T19:42:29.170Z and 2025-11-03T19:42:41.996Z.
* Node measurements and the canvas viewport are refreshed (ChatInput/ChatOutput height 48 → 57, EmbeddingModel height 369 → 378, zoom 0.538 → 0.552), and last_tested_version moves from "1.6.0" to "1.6.5.dev9".
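The new Text Input's value names a Langflow global variable; judging from the ChatService diff further down, the backend populates it per request through the X-LANGFLOW-GLOBAL-VAR-OPENRAG-QUERY-FILTER header and the flow feeds it into the OpenSearch component's filter_expression input. A sketch of the kind of value it carries; the clauses and numbers are illustrative:

import json

filter_expression = {
    "filter": [
        {"term": {"filename": "handbook.pdf"}},                      # single-value user filter
        {"terms": {"mimetype": ["application/pdf", "text/plain"]}},  # multi-value user filter
    ],
    "limit": 5,              # only included when it differs from the default of 10
    "score_threshold": 0.2,  # only included when non-zero
}
header_value = json.dumps(filter_expression)  # what ends up in the global variable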
@@ -122,11 +122,6 @@ export async function uploadFile(
   file: File,
   replace = false
 ): Promise<UploadFileResult> {
-  window.dispatchEvent(
-    new CustomEvent("fileUploadStart", {
-      detail: { filename: file.name },
-    })
-  );
-
   try {
     const formData = new FormData();
@@ -1,47 +1,83 @@
 import {
-  useQuery,
-  useQueryClient,
-  UseQueryOptions,
+  type UseQueryOptions,
+  useQuery,
+  useQueryClient,
 } from "@tanstack/react-query";
 
 type Nudge = string;
 
 const DEFAULT_NUDGES: Nudge[] = [];
 
+export interface NudgeFilters {
+  data_sources?: string[];
+  document_types?: string[];
+  owners?: string[];
+}
+
+export interface NudgeQueryParams {
+  chatId?: string | null;
+  filters?: NudgeFilters;
+  limit?: number;
+  scoreThreshold?: number;
+}
+
 export const useGetNudgesQuery = (
-  chatId?: string | null,
-  options?: Omit<UseQueryOptions, "queryKey" | "queryFn">
+  params: NudgeQueryParams | null = {},
+  options?: Omit<UseQueryOptions, "queryKey" | "queryFn">,
 ) => {
+  const { chatId, filters, limit, scoreThreshold } = params ?? {};
   const queryClient = useQueryClient();
 
   function cancel() {
-    queryClient.removeQueries({ queryKey: ["nudges", chatId] });
+    queryClient.removeQueries({ queryKey: ["nudges", chatId, filters, limit, scoreThreshold] });
   }
 
   async function getNudges(): Promise<Nudge[]> {
     try {
-      const response = await fetch(`/api/nudges${chatId ? `/${chatId}` : ""}`);
+      const requestBody: {
+        filters?: NudgeFilters;
+        limit?: number;
+        score_threshold?: number;
+      } = {};
+
+      if (filters) {
+        requestBody.filters = filters;
+      }
+      if (limit !== undefined) {
+        requestBody.limit = limit;
+      }
+      if (scoreThreshold !== undefined) {
+        requestBody.score_threshold = scoreThreshold;
+      }
+
+      const response = await fetch(`/api/nudges${chatId ? `/${chatId}` : ""}`, {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+        },
+        body: JSON.stringify(requestBody),
+      });
       const data = await response.json();
 
       if (data.response && typeof data.response === "string") {
         return data.response.split("\n").filter(Boolean);
       }
 
       return DEFAULT_NUDGES;
     } catch (error) {
       console.error("Error getting nudges", error);
       return DEFAULT_NUDGES;
     }
   }
 
   const queryResult = useQuery(
     {
-      queryKey: ["nudges", chatId],
+      queryKey: ["nudges", chatId, filters, limit, scoreThreshold],
       queryFn: getNudges,
       ...options,
     },
-    queryClient
+    queryClient,
   );
 
   return { ...queryResult, cancel };
 };
@@ -588,32 +588,12 @@ function ChatPage() {
     setLoading(true);
     setIsUploading(true);
     setUploadedFile(null); // Clear previous file
-
-    // Add initial upload message
-    const uploadStartMessage: Message = {
-      role: "assistant",
-      content: `🔄 Starting upload of **${filename}**...`,
-      timestamp: new Date(),
-    };
-    setMessages((prev) => [...prev, uploadStartMessage]);
   };
 
   const handleFileUploaded = (event: CustomEvent) => {
     const { result } = event.detail;
     console.log("Chat page received file upload event:", result);
 
-    // Replace the last message with upload complete message
-    const uploadMessage: Message = {
-      role: "assistant",
-      content: `📄 Document uploaded: **${result.filename}** (${
-        result.pages
-      } pages, ${result.content_length.toLocaleString()} characters)\n\n${
-        result.confirmation
-      }`,
-      timestamp: new Date(),
-    };
-
-    setMessages((prev) => [...prev.slice(0, -1), uploadMessage]);
-    setUploadedFile(null); // Clear file after upload
 
     // Update the response ID for this endpoint
@@ -708,8 +688,38 @@ function ChatPage() {
     return () => clearInterval(interval);
   }, []);
 
+  // Prepare filters for nudges (same as chat)
+  const processedFiltersForNudges = parsedFilterData?.filters
+    ? (() => {
+        const filters = parsedFilterData.filters;
+        const processed: SelectedFilters = {
+          data_sources: [],
+          document_types: [],
+          owners: [],
+        };
+        processed.data_sources = filters.data_sources.includes("*")
+          ? []
+          : filters.data_sources;
+        processed.document_types = filters.document_types.includes("*")
+          ? []
+          : filters.document_types;
+        processed.owners = filters.owners.includes("*") ? [] : filters.owners;
+
+        const hasFilters =
+          processed.data_sources.length > 0 ||
+          processed.document_types.length > 0 ||
+          processed.owners.length > 0;
+        return hasFilters ? processed : undefined;
+      })()
+    : undefined;
+
   const { data: nudges = [], cancel: cancelNudges } = useGetNudgesQuery(
-    previousResponseIds[endpoint],
+    {
+      chatId: previousResponseIds[endpoint],
+      filters: processedFiltersForNudges,
+      limit: parsedFilterData?.limit ?? 3,
+      scoreThreshold: parsedFilterData?.scoreThreshold ?? 0,
+    },
     {
       enabled: isOnboardingComplete, // Only fetch nudges after onboarding is complete
     },
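The block above collapses "*" (select-all) entries so that a wildcard selection sends no filter at all. The same rule, sketched in Python for brevity; the key names follow the SelectedFilters shape used above:

def collapse_wildcards(filters: dict[str, list[str]]) -> dict[str, list[str]] | None:
    # "*" means "no restriction", so wildcard entries become empty lists.
    processed = {key: [] if "*" in values else list(values) for key, values in filters.items()}
    # If nothing specific remains, no filters are sent to the nudges endpoint.
    return processed if any(processed.values()) else None

collapse_wildcards({"data_sources": ["*"], "document_types": ["*"], "owners": ["*"]})  # -> None
collapse_wildcards({"data_sources": ["handbook.pdf"], "document_types": ["*"], "owners": []})
# -> {"data_sources": ["handbook.pdf"], "document_types": [], "owners": []}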
@@ -1,5 +1,7 @@
 import { AnimatePresence, motion } from "motion/react";
-import { type ChangeEvent, useRef, useState } from "react";
+import { type ChangeEvent, useEffect, useRef, useState } from "react";
+import { useGetNudgesQuery } from "@/app/api/queries/useGetNudgesQuery";
+import { useGetTasksQuery } from "@/app/api/queries/useGetTasksQuery";
 import { AnimatedProviderSteps } from "@/app/onboarding/components/animated-provider-steps";
 import { Button } from "@/components/ui/button";
 import { uploadFile } from "@/lib/upload-utils";
@@ -15,6 +17,46 @@ const OnboardingUpload = ({ onComplete }: OnboardingUploadProps) => {
 
   const STEP_LIST = ["Uploading your document", "Processing your document"];
 
+  // Query tasks to track completion
+  const { data: tasks } = useGetTasksQuery({
+    enabled: currentStep !== null, // Only poll when upload has started
+    refetchInterval: currentStep !== null ? 1000 : false, // Poll every 1 second during upload
+  });
+
+  const { refetch: refetchNudges } = useGetNudgesQuery(null);
+
+  // Monitor tasks and call onComplete when file processing is done
+  useEffect(() => {
+    if (currentStep === null || !tasks) {
+      return;
+    }
+
+    // Check if there are any active tasks (pending, running, or processing)
+    const activeTasks = tasks.find(
+      (task) =>
+        task.status === "pending" ||
+        task.status === "running" ||
+        task.status === "processing",
+    );
+
+    // If no active tasks and we have more than 1 task (initial + new upload), complete it
+    if (
+      (!activeTasks || (activeTasks.processed_files ?? 0) > 0) &&
+      tasks.length > 1
+    ) {
+      // Set to final step to show "Done"
+      setCurrentStep(STEP_LIST.length);
+
+      // Refetch nudges to get new ones
+      refetchNudges();
+
+      // Wait a bit before completing
+      setTimeout(() => {
+        onComplete();
+      }, 1000);
+    }
+  }, [tasks, currentStep, onComplete, refetchNudges]);
+
   const resetFileInput = () => {
     if (fileInputRef.current) {
       fileInputRef.current.value = "";
@@ -30,15 +72,17 @@ const OnboardingUpload = ({ onComplete }: OnboardingUploadProps) => {
 
     try {
       setCurrentStep(0);
       await uploadFile(file, true);
-      console.log("Document uploaded successfully");
+      console.log("Document upload task started successfully");
+      // Move to processing step - task monitoring will handle completion
+      setTimeout(() => {
+        setCurrentStep(1);
+      }, 1500);
     } catch (error) {
       console.error("Upload failed", (error as Error).message);
+      // Reset on error
+      setCurrentStep(null);
     } finally {
       setIsUploading(false);
-      await new Promise((resolve) => setTimeout(resolve, 1000));
-      setCurrentStep(STEP_LIST.length);
-      await new Promise((resolve) => setTimeout(resolve, 500));
-      onComplete();
     }
   };
@@ -12,9 +12,23 @@ async def nudges_from_kb_endpoint(request: Request, chat_service, session_manage
     jwt_token = session_manager.get_effective_jwt_token(user_id, request.state.jwt_token)
 
     try:
+        # Parse request body for filters
+        body = {}
+        try:
+            body = await request.json()
+        except Exception:
+            body = {}
+
+        filters = body.get("filters")
+        limit = body.get("limit")
+        score_threshold = body.get("score_threshold")
+
         result = await chat_service.langflow_nudges_chat(
             user_id,
             jwt_token,
+            filters=filters,
+            limit=limit,
+            score_threshold=score_threshold,
         )
         return JSONResponse(result)
     except Exception as e:
@@ -32,10 +46,24 @@ async def nudges_from_chat_id_endpoint(request: Request, chat_service, session_m
     jwt_token = session_manager.get_effective_jwt_token(user_id, request.state.jwt_token)
 
     try:
+        # Parse request body for filters
+        body = {}
+        try:
+            body = await request.json()
+        except Exception:
+            body = {}
+
+        filters = body.get("filters")
+        limit = body.get("limit")
+        score_threshold = body.get("score_threshold")
+
         result = await chat_service.langflow_nudges_chat(
             user_id,
             jwt_token,
             previous_response_id=chat_id,
+            filters=filters,
+            limit=limit,
+            score_threshold=score_threshold,
        )
        return JSONResponse(result)
    except Exception as e:
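Both endpoints tolerate an empty or missing body; anything absent simply comes back as None from body.get(...). A representative parsed body and the resulting service call, with illustrative values:

body = {
    "filters": {"document_types": ["application/pdf", "text/plain"]},  # illustrative
    "limit": 3,
    # "score_threshold" omitted -> body.get("score_threshold") returns None
}

# What the endpoint would forward to the chat service for this body:
# chat_service.langflow_nudges_chat(
#     user_id,
#     jwt_token,
#     filters={"document_types": ["application/pdf", "text/plain"]},
#     limit=3,
#     score_threshold=None,
# )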
@@ -370,6 +370,7 @@ async def _ingest_default_documents_langflow(services, file_paths):
                 {"key": "owner_name", "value": anonymous_user.name},
                 {"key": "owner_email", "value": anonymous_user.email},
                 {"key": "connector_type", "value": "system_default"},
+                {"key": "is_sample_data", "value": "true"},
             ]
         }
     }
@@ -413,6 +414,7 @@ async def _ingest_default_documents_openrag(services, file_paths):
         jwt_token=None,
         owner_name=None,
         owner_email=None,
+        is_sample_data=True,  # Mark as sample data
     )
 
     task_id = await services["task_service"].create_custom_task(
@@ -1057,7 +1059,7 @@ async def create_app():
                     session_manager=services["session_manager"],
                 )
             ),
-            methods=["GET"],
+            methods=["POST"],
         ),
         Route(
             "/nudges/{chat_id}",
@@ -1068,7 +1070,7 @@ async def create_app():
                     session_manager=services["session_manager"],
                 )
             ),
-            methods=["GET"],
+            methods=["POST"],
         ),
         Route(
             "/reset-flow/{flow_type}",
@@ -157,6 +157,7 @@ class TaskProcessor:
         file_size: int = None,
         connector_type: str = "local",
         embedding_model: str = None,
+        is_sample_data: bool = False,
     ):
         """
         Standard processing pipeline for non-Langflow processors:
@@ -240,6 +241,10 @@ class TaskProcessor:
                     chunk_doc["owner_name"] = owner_name
                 if owner_email is not None:
                     chunk_doc["owner_email"] = owner_email
+
+                # Mark as sample data if specified
+                if is_sample_data:
+                    chunk_doc["is_sample_data"] = "true"
                 chunk_id = f"{file_hash}_{i}"
                 try:
                     await opensearch_client.index(
@@ -286,12 +291,14 @@ class DocumentFileProcessor(TaskProcessor):
         jwt_token: str = None,
         owner_name: str = None,
         owner_email: str = None,
+        is_sample_data: bool = False,
     ):
         super().__init__(document_service)
         self.owner_user_id = owner_user_id
         self.jwt_token = jwt_token
         self.owner_name = owner_name
         self.owner_email = owner_email
+        self.is_sample_data = is_sample_data
 
     async def process_item(
         self, upload_task: UploadTask, item: str, file_task: FileTask
@@ -326,6 +333,7 @@ class DocumentFileProcessor(TaskProcessor):
                 owner_email=self.owner_email,
                 file_size=file_size,
                 connector_type="local",
+                is_sample_data=self.is_sample_data,
             )
 
             file_task.status = TaskStatus.COMPLETED
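With the flag threaded through the processor, sample-data chunks are indexed with an extra marker field. A sketch of a chunk document right before indexing, assuming is_sample_data=True; only the fields touched in the hunks above are shown, and the values are illustrative:

chunk_doc = {
    # ... text, embedding, and the other chunk fields ...
    "owner_name": "Anonymous",               # set only when owner_name is not None
    "owner_email": "anonymous@example.com",  # set only when owner_email is not None
    "is_sample_data": "true",                # stored as the string "true"
}
chunk_id = "d41d8cd98f00b204_0"              # follows the f"{file_hash}_{i}" pattern, hash illustrative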
@@ -158,8 +158,11 @@ class ChatService:
         user_id: str = None,
         jwt_token: str = None,
         previous_response_id: str = None,
+        filters: dict = None,
+        limit: int = None,
+        score_threshold: float = None,
     ):
-        """Handle Langflow chat requests"""
+        """Handle Langflow nudges chat requests with knowledge filters"""
 
         if not LANGFLOW_URL or not NUDGES_FLOW_ID:
             raise ValueError(
@@ -171,6 +174,67 @@ class ChatService:
         if jwt_token:
             extra_headers["X-LANGFLOW-GLOBAL-VAR-JWT"] = jwt_token
 
+        # Build the complete filter expression like the chat service does
+        filter_expression = {}
+        has_user_filters = False
+        filter_clauses = []
+
+        if filters:
+            # Map frontend filter names to backend field names
+            field_mapping = {
+                "data_sources": "filename",
+                "document_types": "mimetype",
+                "owners": "owner",
+            }
+
+            for filter_key, values in filters.items():
+                if values is not None and isinstance(values, list) and len(values) > 0:
+                    # Map frontend key to backend field name
+                    field_name = field_mapping.get(filter_key, filter_key)
+
+                    if len(values) == 1:
+                        # Single value filter
+                        filter_clauses.append({"term": {field_name: values[0]}})
+                    else:
+                        # Multiple values filter
+                        filter_clauses.append({"terms": {field_name: values}})
+
+            if filter_clauses:
+                has_user_filters = True
+
+        # If no user filters are active, exclude sample data from nudges
+        if not has_user_filters:
+            # Add a bool query with must_not to exclude sample data
+            filter_clauses.append({
+                "bool": {
+                    "must_not": [
+                        {"term": {"is_sample_data": "true"}}
+                    ]
+                }
+            })
+            logger.info("Excluding sample data from nudges (no user filters active)")
+
+        # Set the filter clauses if we have any
+        if filter_clauses:
+            filter_expression["filter"] = filter_clauses
+
+        # Add limit and score threshold to the filter expression (only if different from defaults)
+        if limit and limit != 10:  # 10 is the default limit
+            filter_expression["limit"] = limit
+
+        if score_threshold and score_threshold != 0:  # 0 is the default threshold
+            filter_expression["score_threshold"] = score_threshold
+
+        # Pass the complete filter expression as a single header to Langflow (only if we have something to send)
+        logger.info(
+            "Sending OpenRAG query filter to Langflow nudges",
+            filter_expression=filter_expression,
+        )
+        extra_headers["X-LANGFLOW-GLOBAL-VAR-OPENRAG-QUERY-FILTER"] = json.dumps(
+            filter_expression
+        )
+        logger.info(f"[NUDGES] Extra headers {extra_headers}")
+
         # Ensure the Langflow client exists; try lazy init if needed
         langflow_client = await clients.ensure_langflow_client()
         if not langflow_client:
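To make the mapping above concrete, a worked trace of the clause building for one hypothetical request, plus the no-filter case that excludes sample data; the input values are illustrative:

# With user filters: single values become "term" clauses, multi-values become "terms".
filters = {
    "data_sources": ["handbook.pdf"],                     # mapped to "filename"
    "document_types": ["application/pdf", "text/plain"],  # mapped to "mimetype"
}
# Resulting expression sent in X-LANGFLOW-GLOBAL-VAR-OPENRAG-QUERY-FILTER:
# {
#   "filter": [
#     {"term": {"filename": "handbook.pdf"}},
#     {"terms": {"mimetype": ["application/pdf", "text/plain"]}}
#   ]
# }

# Without user filters, nudges exclude sample data instead:
# {
#   "filter": [
#     {"bool": {"must_not": [{"term": {"is_sample_data": "true"}}]}}
#   ]
# }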