diff --git a/agent/canvas.py b/agent/canvas.py index 5fb7af83f..b693ed434 100644 --- a/agent/canvas.py +++ b/agent/canvas.py @@ -91,9 +91,6 @@ class Graph: def load(self): self.components = self.dsl["components"] cpn_nms = set([]) - for k, cpn in self.components.items(): - cpn_nms.add(cpn["obj"]["component_name"]) - for k, cpn in self.components.items(): cpn_nms.add(cpn["obj"]["component_name"]) param = component_class(cpn["obj"]["component_name"] + "Param")() diff --git a/api/apps/sdk/doc.py b/api/apps/sdk/doc.py index 0a007f148..23f430a28 100644 --- a/api/apps/sdk/doc.py +++ b/api/apps/sdk/doc.py @@ -321,9 +321,7 @@ async def update_doc(tenant_id, dataset_id, document_id): try: if not DocumentService.update_by_id(doc.id, {"status": str(status)}): return get_error_data_result(message="Database error (Document update)!") - settings.docStoreConn.update({"doc_id": doc.id}, {"available_int": status}, search.index_name(kb.tenant_id), doc.kb_id) - return get_result(data=True) except Exception as e: return server_error_response(e) @@ -350,12 +348,10 @@ async def update_doc(tenant_id, dataset_id, document_id): } renamed_doc = {} for key, value in doc.to_dict().items(): - if key == "run": - renamed_doc["run"] = run_mapping.get(str(value)) new_key = key_mapping.get(key, key) renamed_doc[new_key] = value if key == "run": - renamed_doc["run"] = run_mapping.get(value) + renamed_doc["run"] = run_mapping.get(str(value)) return get_result(data=renamed_doc) diff --git a/common/constants.py b/common/constants.py index 574786d00..4cced1266 100644 --- a/common/constants.py +++ b/common/constants.py @@ -148,6 +148,7 @@ class Storage(Enum): AWS_S3 = 4 OSS = 5 OPENDAL = 6 + GCS = 7 # environment # ENV_STRONG_TEST_COUNT = "STRONG_TEST_COUNT" diff --git a/common/settings.py b/common/settings.py index 81a2c19a4..c8f7a6de4 100644 --- a/common/settings.py +++ b/common/settings.py @@ -31,6 +31,7 @@ import rag.utils.ob_conn import rag.utils.opensearch_conn from rag.utils.azure_sas_conn import 
RAGFlowAzureSasBlob from rag.utils.azure_spn_conn import RAGFlowAzureSpnBlob +from rag.utils.gcs_conn import RAGFlowGCS from rag.utils.minio_conn import RAGFlowMinio from rag.utils.opendal_conn import OpenDALStorage from rag.utils.s3_conn import RAGFlowS3 @@ -109,6 +110,7 @@ MINIO = {} OB = {} OSS = {} OS = {} +GCS = {} DOC_MAXIMUM_SIZE: int = 128 * 1024 * 1024 DOC_BULK_SIZE: int = 4 @@ -151,7 +153,8 @@ class StorageFactory: Storage.AZURE_SAS: RAGFlowAzureSasBlob, Storage.AWS_S3: RAGFlowS3, Storage.OSS: RAGFlowOSS, - Storage.OPENDAL: OpenDALStorage + Storage.OPENDAL: OpenDALStorage, + Storage.GCS: RAGFlowGCS, } @classmethod @@ -250,7 +253,7 @@ def init_settings(): else: raise Exception(f"Not supported doc engine: {DOC_ENGINE}") - global AZURE, S3, MINIO, OSS + global AZURE, S3, MINIO, OSS, GCS if STORAGE_IMPL_TYPE in ['AZURE_SPN', 'AZURE_SAS']: AZURE = get_base_config("azure", {}) elif STORAGE_IMPL_TYPE == 'AWS_S3': @@ -259,6 +262,8 @@ def init_settings(): MINIO = decrypt_database_config(name="minio") elif STORAGE_IMPL_TYPE == 'OSS': OSS = get_base_config("oss", {}) + elif STORAGE_IMPL_TYPE == 'GCS': + GCS = get_base_config("gcs", {}) global STORAGE_IMPL STORAGE_IMPL = StorageFactory.create(Storage[STORAGE_IMPL_TYPE]) diff --git a/conf/service_conf.yaml b/conf/service_conf.yaml index 07a7b32a9..82f2e9248 100644 --- a/conf/service_conf.yaml +++ b/conf/service_conf.yaml @@ -60,6 +60,8 @@ user_default_llm: # access_key: 'access_key' # secret_key: 'secret_key' # region: 'region' +#gcs: +# bucket: 'bridgtl-edm-d-bucket-ragflow' # oss: # access_key: 'access_key' # secret_key: 'secret_key' diff --git a/rag/app/presentation.py b/rag/app/presentation.py index 1404ac19b..7ce801749 100644 --- a/rag/app/presentation.py +++ b/rag/app/presentation.py @@ -86,9 +86,11 @@ class Pdf(PdfParser): # (A) Add text for b in self.boxes: - if not (from_page < b["page_number"] <= to_page + from_page): + # b["page_number"] is relative page number,must + from_page + global_page_num = 
b["page_number"] + from_page + if not (from_page < global_page_num <= to_page + from_page): continue - page_items[b["page_number"]].append({ + page_items[global_page_num].append({ "top": b["top"], "x0": b["x0"], "text": b["text"], @@ -100,7 +102,6 @@ class Pdf(PdfParser): if not positions: continue - # Handle content type (list vs str) if isinstance(content, list): final_text = "\n".join(content) elif isinstance(content, str): @@ -109,10 +110,11 @@ class Pdf(PdfParser): final_text = str(content) try: - # Parse positions pn_index = positions[0][0] if isinstance(pn_index, list): pn_index = pn_index[0] + + # pn_index in tbls is absolute page number current_page_num = int(pn_index) + 1 except Exception as e: print(f"Error parsing position: {e}") diff --git a/rag/utils/gcs_conn.py b/rag/utils/gcs_conn.py new file mode 100644 index 000000000..5268cea42 --- /dev/null +++ b/rag/utils/gcs_conn.py @@ -0,0 +1,207 @@ +# Copyright 2025 The InfiniFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import logging +import time +import datetime +from io import BytesIO +from google.cloud import storage +from google.api_core.exceptions import NotFound +from common.decorator import singleton +from common import settings + + +@singleton +class RAGFlowGCS: + def __init__(self): + self.client = None + self.bucket_name = None + self.__open__() + + def __open__(self): + try: + if self.client: + self.client = None + except Exception: + pass + + try: + self.client = storage.Client() + self.bucket_name = settings.GCS["bucket"] + except Exception: + logging.exception("Fail to connect to GCS") + + def _get_blob_path(self, folder, filename): + """Helper to construct the path: folder/filename""" + if not folder: + return filename + return f"{folder}/{filename}" + + def health(self): + folder, fnm, binary = "ragflow-health", "health_check", b"_t@@@1" + try: + bucket_obj = self.client.bucket(self.bucket_name) + if not bucket_obj.exists(): + logging.error(f"Health check failed: Main bucket '{self.bucket_name}' does not exist.") + return False + + blob_path = self._get_blob_path(folder, fnm) + blob = bucket_obj.blob(blob_path) + blob.upload_from_file(BytesIO(binary), content_type='application/octet-stream') + return True + except Exception as e: + logging.exception(f"Health check failed: {e}") + return False + + def put(self, bucket, fnm, binary, tenant_id=None): + # RENAMED PARAMETER: bucket_name -> bucket (to match interface) + for _ in range(3): + try: + bucket_obj = self.client.bucket(self.bucket_name) + blob_path = self._get_blob_path(bucket, fnm) + blob = bucket_obj.blob(blob_path) + + blob.upload_from_file(BytesIO(binary), content_type='application/octet-stream') + return True + except NotFound: + logging.error(f"Fail to put: Main bucket {self.bucket_name} does not exist.") + return False + except Exception: + logging.exception(f"Fail to put {bucket}/{fnm}:") + self.__open__() + time.sleep(1) + return False + + def rm(self, bucket, fnm, tenant_id=None): + # RENAMED
PARAMETER: bucket_name -> bucket + try: + bucket_obj = self.client.bucket(self.bucket_name) + blob_path = self._get_blob_path(bucket, fnm) + blob = bucket_obj.blob(blob_path) + blob.delete() + except NotFound: + pass + except Exception: + logging.exception(f"Fail to remove {bucket}/{fnm}:") + + def get(self, bucket, filename, tenant_id=None): + # RENAMED PARAMETER: bucket_name -> bucket + for _ in range(1): + try: + bucket_obj = self.client.bucket(self.bucket_name) + blob_path = self._get_blob_path(bucket, filename) + blob = bucket_obj.blob(blob_path) + return blob.download_as_bytes() + except NotFound: + logging.warning(f"File not found {bucket}/{filename} in {self.bucket_name}") + return None + except Exception: + logging.exception(f"Fail to get {bucket}/{filename}") + self.__open__() + time.sleep(1) + return None + + def obj_exist(self, bucket, filename, tenant_id=None): + # RENAMED PARAMETER: bucket_name -> bucket + try: + bucket_obj = self.client.bucket(self.bucket_name) + blob_path = self._get_blob_path(bucket, filename) + blob = bucket_obj.blob(blob_path) + return blob.exists() + except Exception: + logging.exception(f"obj_exist {bucket}/{filename} got exception") + return False + + def bucket_exists(self, bucket): + # RENAMED PARAMETER: bucket_name -> bucket + try: + bucket_obj = self.client.bucket(self.bucket_name) + return bucket_obj.exists() + except Exception: + logging.exception(f"bucket_exist check for {self.bucket_name} got exception") + return False + + def get_presigned_url(self, bucket, fnm, expires, tenant_id=None): + # RENAMED PARAMETER: bucket_name -> bucket + for _ in range(10): + try: + bucket_obj = self.client.bucket(self.bucket_name) + blob_path = self._get_blob_path(bucket, fnm) + blob = bucket_obj.blob(blob_path) + + expiration = expires + if isinstance(expires, int): + expiration = datetime.timedelta(seconds=expires) + + url = blob.generate_signed_url( + version="v4", + expiration=expiration, + method="GET" + ) + return url + except
Exception: + logging.exception(f"Fail to get_presigned {bucket}/{fnm}:") + self.__open__() + time.sleep(1) + return None + + def remove_bucket(self, bucket): + # RENAMED PARAMETER: bucket_name -> bucket + try: + bucket_obj = self.client.bucket(self.bucket_name) + prefix = f"{bucket}/" + + blobs = list(self.client.list_blobs(self.bucket_name, prefix=prefix)) + + if blobs: + bucket_obj.delete_blobs(blobs) + except Exception: + logging.exception(f"Fail to remove virtual bucket (folder) {bucket}") + + def copy(self, src_bucket, src_path, dest_bucket, dest_path): + # RENAMED PARAMETERS to match original interface + try: + bucket_obj = self.client.bucket(self.bucket_name) + + src_blob_path = self._get_blob_path(src_bucket, src_path) + dest_blob_path = self._get_blob_path(dest_bucket, dest_path) + + src_blob = bucket_obj.blob(src_blob_path) + + if not src_blob.exists(): + logging.error(f"Source object not found: {src_blob_path}") + return False + + bucket_obj.copy_blob(src_blob, bucket_obj, dest_blob_path) + return True + + except NotFound: + logging.error(f"Copy failed: Main bucket {self.bucket_name} does not exist.") + return False + except Exception: + logging.exception(f"Fail to copy {src_bucket}/{src_path} -> {dest_bucket}/{dest_path}") + return False + + def move(self, src_bucket, src_path, dest_bucket, dest_path): + try: + if self.copy(src_bucket, src_path, dest_bucket, dest_path): + self.rm(src_bucket, src_path) + return True + else: + logging.error(f"Copy failed, move aborted: {src_bucket}/{src_path}") + return False + except Exception: + logging.exception(f"Fail to move {src_bucket}/{src_path} -> {dest_bucket}/{dest_path}") + return False \ No newline at end of file diff --git a/web/src/components/json-edit/index.tsx b/web/src/components/json-edit/index.tsx index 2ab49c4e2..31586c59e 100644 --- a/web/src/components/json-edit/index.tsx +++ b/web/src/components/json-edit/index.tsx @@ -1,8 +1,9 @@ -import React, { useEffect, useRef } from 'react'; +import React, { 
useEffect, useRef, useState } from 'react'; import { useTranslation } from 'react-i18next'; import './css/cloud9_night.less'; import './css/index.less'; import { JsonEditorOptions, JsonEditorProps } from './interface'; + const defaultConfig: JsonEditorOptions = { mode: 'code', modes: ['tree', 'code'], @@ -14,6 +15,7 @@ const defaultConfig: JsonEditorOptions = { enableTransform: false, indentation: 2, }; + const JsonEditor: React.FC = ({ value, onChange, @@ -25,43 +27,62 @@ const JsonEditor: React.FC = ({ const editorRef = useRef(null); const { i18n } = useTranslation(); const currentLanguageRef = useRef(i18n.language); + const [isLoading, setIsLoading] = useState(true); useEffect(() => { - if (typeof window !== 'undefined') { - const JSONEditor = require('jsoneditor'); - import('jsoneditor/dist/jsoneditor.min.css'); + let isMounted = true; - if (containerRef.current) { - // Default configuration options - const defaultOptions: JsonEditorOptions = { - ...defaultConfig, - language: i18n.language === 'zh' ? 'zh-CN' : 'en', - onChange: () => { - if (editorRef.current && onChange) { - try { - const updatedJson = editorRef.current.get(); - onChange(updatedJson); - } catch (err) { - // Do not trigger onChange when parsing error occurs - console.error(err); - } + const initEditor = async () => { + if (typeof window !== 'undefined') { + try { + const JSONEditorModule = await import('jsoneditor'); + const JSONEditor = JSONEditorModule.default || JSONEditorModule; + + await import('jsoneditor/dist/jsoneditor.min.css'); + + if (isMounted && containerRef.current) { + // Default configuration options + const defaultOptions: JsonEditorOptions = { + ...defaultConfig, + language: i18n.language === 'zh' ? 
'zh-CN' : 'en', + onChange: () => { + if (editorRef.current && onChange) { + try { + const updatedJson = editorRef.current.get(); + onChange(updatedJson); + } catch (err) { + // Do not trigger onChange when parsing error occurs + console.error(err); + } + } + }, + ...options, // Merge user provided options with defaults + }; + + editorRef.current = new JSONEditor( + containerRef.current, + defaultOptions, + ); + + if (value) { + editorRef.current.set(value); } - }, - ...options, // Merge user provided options with defaults - }; - editorRef.current = new JSONEditor( - containerRef.current, - defaultOptions, - ); - - if (value) { - editorRef.current.set(value); + setIsLoading(false); + } + } catch (error) { + console.error('Failed to load jsoneditor:', error); + if (isMounted) { + setIsLoading(false); + } } } - } + }; + + initEditor(); return () => { + isMounted = false; if (editorRef.current) { if (typeof editorRef.current.destroy === 'function') { editorRef.current.destroy(); @@ -92,26 +113,38 @@ const JsonEditor: React.FC = ({ } // Recreate the editor with new language - const JSONEditor = require('jsoneditor'); + const initEditorWithNewLanguage = async () => { + try { + const JSONEditorModule = await import('jsoneditor'); + const JSONEditor = JSONEditorModule.default || JSONEditorModule; - const newOptions: JsonEditorOptions = { - ...defaultConfig, - language: i18n.language === 'zh' ? 'zh-CN' : 'en', - onChange: () => { - if (editorRef.current && onChange) { - try { - const updatedJson = editorRef.current.get(); - onChange(updatedJson); - } catch (err) { - // Do not trigger onChange when parsing error occurs - } - } - }, - ...options, // Merge user provided options with defaults + const newOptions: JsonEditorOptions = { + ...defaultConfig, + language: i18n.language === 'zh' ? 
'zh-CN' : 'en', + onChange: () => { + if (editorRef.current && onChange) { + try { + const updatedJson = editorRef.current.get(); + onChange(updatedJson); + } catch (err) { + // Do not trigger onChange when parsing error occurs + } + } + }, + ...options, // Merge user provided options with defaults + }; + + editorRef.current = new JSONEditor(containerRef.current, newOptions); + editorRef.current.set(currentData); + } catch (error) { + console.error( + 'Failed to reload jsoneditor with new language:', + error, + ); + } }; - editorRef.current = new JSONEditor(containerRef.current, newOptions); - editorRef.current.set(currentData); + initEditorWithNewLanguage(); } }, [i18n.language, value, onChange, options]); @@ -135,7 +168,13 @@ const JsonEditor: React.FC = ({ ref={containerRef} style={{ height }} className={`ace-tomorrow-night w-full border border-border-button rounded-lg overflow-hidden bg-bg-input ${className} `} - /> + > + {isLoading && ( +
+
Loading editor...
+
+ )} + ); }; diff --git a/web/src/custom.d.ts b/web/src/custom.d.ts index f73d61b39..dafdf09f1 100644 --- a/web/src/custom.d.ts +++ b/web/src/custom.d.ts @@ -2,3 +2,9 @@ declare module '*.md' { const content: string; export default content; } + +declare module 'jsoneditor' { + const JSONEditor: any; + export default JSONEditor; + export = JSONEditor; +} diff --git a/web/src/pages/agent/canvas/index.tsx b/web/src/pages/agent/canvas/index.tsx index 7be70be83..6b954d069 100644 --- a/web/src/pages/agent/canvas/index.tsx +++ b/web/src/pages/agent/canvas/index.tsx @@ -40,6 +40,7 @@ import { useDropdownManager } from './context'; import { AgentBackground } from '@/components/canvas/background'; import Spotlight from '@/components/spotlight'; +import { useNodeLoading } from '../hooks/use-node-loading'; import { useHideFormSheetOnNodeDeletion, useShowDrawer, @@ -166,6 +167,8 @@ function AgentCanvas({ drawerVisible, hideDrawer }: IProps) { }); const [lastSendLoading, setLastSendLoading] = useState(false); + const [currentSendLoading, setCurrentSendLoading] = useState(false); + const { handleBeforeDelete } = useBeforeDelete(); const { addCanvasNode, addNoteNode } = useAddNode(reactFlowInstance); @@ -182,6 +185,7 @@ function AgentCanvas({ drawerVisible, hideDrawer }: IProps) { }, [chatVisible, clearEventList, currentTaskId, stopMessage]); const setLastSendLoadingFunc = (loading: boolean, messageId: string) => { + setCurrentSendLoading(!!loading); if (messageId === currentMessageId) { setLastSendLoading(loading); } else { @@ -249,7 +253,10 @@ function AgentCanvas({ drawerVisible, hideDrawer }: IProps) { clearActiveDropdown, removePlaceholderNode, ]); - + const { lastNode, setDerivedMessages, startButNotFinishedNodeIds } = + useNodeLoading({ + currentEventListWithoutMessageById, + }); return (
- + )} + {chatVisible && ( - + {isHeadAgent && ( <> diff --git a/web/src/pages/agent/canvas/node/begin-node.tsx b/web/src/pages/agent/canvas/node/begin-node.tsx index 48f69e5f6..053ef925a 100644 --- a/web/src/pages/agent/canvas/node/begin-node.tsx +++ b/web/src/pages/agent/canvas/node/begin-node.tsx @@ -24,7 +24,7 @@ function InnerBeginNode({ data, id, selected }: NodeProps) { const inputs: Record = get(data, 'form.inputs', {}); return ( - + - + diff --git a/web/src/pages/agent/canvas/node/exit-loop-node.tsx b/web/src/pages/agent/canvas/node/exit-loop-node.tsx index e6bd6ba3d..25b43e4ff 100644 --- a/web/src/pages/agent/canvas/node/exit-loop-node.tsx +++ b/web/src/pages/agent/canvas/node/exit-loop-node.tsx @@ -14,7 +14,7 @@ export function ExitLoopNode({ id, data, selected }: NodeProps>) { showRun={false} showCopy={false} > - + diff --git a/web/src/pages/agent/canvas/node/file-node.tsx b/web/src/pages/agent/canvas/node/file-node.tsx index d868d70fa..a0705b0b7 100644 --- a/web/src/pages/agent/canvas/node/file-node.tsx +++ b/web/src/pages/agent/canvas/node/file-node.tsx @@ -23,7 +23,7 @@ function InnerFileNode({ data, id, selected }: NodeProps) { const inputs: Record = get(data, 'form.inputs', {}); return ( - + - + ) { const messages: string[] = get(data, 'form.content', []); return ( - + & { selected?: boolean }; -export function NodeWrapper({ children, className, selected }: IProps) { +export function NodeWrapper({ children, className, selected, id }: IProps) { + const { currentSendLoading, startButNotFinishedNodeIds = [] } = + useContext(AgentInstanceContext); return (
+ {id && + startButNotFinishedNodeIds.indexOf(id as string) > -1 && + currentSendLoading && ( +
+ +
+ )} {children}
); diff --git a/web/src/pages/agent/canvas/node/parser-node.tsx b/web/src/pages/agent/canvas/node/parser-node.tsx index d66c79c45..96a83050a 100644 --- a/web/src/pages/agent/canvas/node/parser-node.tsx +++ b/web/src/pages/agent/canvas/node/parser-node.tsx @@ -19,7 +19,7 @@ function ParserNode({ }: NodeProps>) { const { t } = useTranslation(); return ( - + - + - + ) { const { positions } = useBuildSwitchHandlePositions({ data, id }); return ( - +
diff --git a/web/src/pages/agent/canvas/node/tokenizer-node.tsx b/web/src/pages/agent/canvas/node/tokenizer-node.tsx index 830ababdd..24208b8ea 100644 --- a/web/src/pages/agent/canvas/node/tokenizer-node.tsx +++ b/web/src/pages/agent/canvas/node/tokenizer-node.tsx @@ -27,7 +27,7 @@ function TokenizerNode({ showRun={false} showCopy={false} > - + + = diff --git a/web/src/pages/agent/context.ts b/web/src/pages/agent/context.ts index 6839554d3..3b9a4e5c9 100644 --- a/web/src/pages/agent/context.ts +++ b/web/src/pages/agent/context.ts @@ -1,6 +1,8 @@ +import { INodeEvent } from '@/hooks/use-send-message'; +import { IMessage } from '@/interfaces/database/chat'; import { RAGFlowNodeType } from '@/interfaces/database/flow'; import { HandleType, Position } from '@xyflow/react'; -import { createContext } from 'react'; +import { Dispatch, SetStateAction, createContext } from 'react'; import { useAddNode } from './hooks/use-add-node'; import { useCacheChatLog } from './hooks/use-cache-chat-log'; import { useShowFormDrawer, useShowLogSheet } from './hooks/use-show-drawer'; @@ -13,7 +15,11 @@ type AgentInstanceContextType = Pick< ReturnType, 'addCanvasNode' > & - Pick, 'showFormDrawer'>; + Pick, 'showFormDrawer'> & { + lastNode: INodeEvent | null; + currentSendLoading: boolean; + startButNotFinishedNodeIds: string[]; + }; export const AgentInstanceContext = createContext( {} as AgentInstanceContextType, @@ -22,7 +28,10 @@ export const AgentInstanceContext = createContext( type AgentChatContextType = Pick< ReturnType, 'showLogSheet' -> & { setLastSendLoadingFunc: (loading: boolean, messageId: string) => void }; +> & { + setLastSendLoadingFunc: (loading: boolean, messageId: string) => void; + setDerivedMessages: Dispatch>; +}; export const AgentChatContext = createContext( {} as AgentChatContextType, diff --git a/web/src/pages/agent/form-sheet/next.tsx b/web/src/pages/agent/form-sheet/next.tsx index 5c759a5e5..b9ebeac51 100644 --- a/web/src/pages/agent/form-sheet/next.tsx +++ 
b/web/src/pages/agent/form-sheet/next.tsx @@ -55,7 +55,7 @@ const FormSheet = ({ diff --git a/web/src/pages/agent/hooks/use-node-loading.ts b/web/src/pages/agent/hooks/use-node-loading.ts new file mode 100644 index 000000000..d92702f56 --- /dev/null +++ b/web/src/pages/agent/hooks/use-node-loading.ts @@ -0,0 +1,88 @@ +import { + INodeData, + INodeEvent, + MessageEventType, +} from '@/hooks/use-send-message'; +import { IMessage } from '@/interfaces/database/chat'; +import { useCallback, useMemo, useState } from 'react'; + +export const useNodeLoading = ({ + currentEventListWithoutMessageById, +}: { + currentEventListWithoutMessageById: (messageId: string) => INodeEvent[]; +}) => { + const [derivedMessages, setDerivedMessages] = useState(); + + const lastMessageId = useMemo(() => { + return derivedMessages?.[derivedMessages?.length - 1]?.id; + }, [derivedMessages]); + + const currentEventListWithoutMessage = useMemo(() => { + if (!lastMessageId) { + return []; + } + return currentEventListWithoutMessageById(lastMessageId); + }, [currentEventListWithoutMessageById, lastMessageId]); + + const startedNodeList = useMemo(() => { + const duplicateList = currentEventListWithoutMessage?.filter( + (x) => x.event === MessageEventType.NodeStarted, + ) as INodeEvent[]; + + // Remove duplicate nodes + return duplicateList?.reduce>((pre, cur) => { + if (pre.every((x) => x.data.component_id !== cur.data.component_id)) { + pre.push(cur); + } + return pre; + }, []); + }, [currentEventListWithoutMessage]); + + const filterFinishedNodeList = useCallback(() => { + const nodeEventList = currentEventListWithoutMessage + .filter( + (x) => x.event === MessageEventType.NodeFinished, + // x.event === MessageEventType.NodeFinished && + // (x.data as INodeData)?.component_id === componentId, + ) + .map((x) => x.data); + + return nodeEventList; + }, [currentEventListWithoutMessage]); + + const lastNode = useMemo(() => { + if (!startedNodeList) { + return null; + } + return 
startedNodeList[startedNodeList.length - 1]; + }, [startedNodeList]); + + const startNodeIds = useMemo(() => { + if (!startedNodeList) { + return []; + } + return startedNodeList.map((x) => x.data.component_id); + }, [startedNodeList]); + + const finishNodeIds = useMemo(() => { + if (!lastNode) { + return []; + } + const nodeDataList = filterFinishedNodeList(); + const finishNodeIdsTemp = nodeDataList.map( + (x: INodeData) => x.component_id, + ); + return Array.from(new Set(finishNodeIdsTemp)); + }, [lastNode, filterFinishedNodeList]); + + const startButNotFinishedNodeIds = useMemo(() => { + return startNodeIds.filter((x) => !finishNodeIds.includes(x)); + }, [finishNodeIds, startNodeIds]); + + return { + lastNode, + startButNotFinishedNodeIds, + filterFinishedNodeList, + setDerivedMessages, + }; +}; diff --git a/web/src/pages/agent/log-sheet/index.tsx b/web/src/pages/agent/log-sheet/index.tsx index 76c0b0865..bea2808a2 100644 --- a/web/src/pages/agent/log-sheet/index.tsx +++ b/web/src/pages/agent/log-sheet/index.tsx @@ -26,7 +26,7 @@ export function LogSheet({ return ( e.preventDefault()} >