Merge pull request #2124 from danielaskdd/label-search

Refactor(WebUI): Replace Client-side Search Logic with Server-driven Entity Name Search
This commit is contained in:
Daniel.y 2025-09-20 15:31:42 +08:00 committed by GitHub
commit 36f242c164
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
74 changed files with 2292 additions and 1026 deletions

View file

@ -313,7 +313,7 @@ POSTGRES_IVFFLAT_LISTS=100
NEO4J_URI=neo4j+s://xxxxxxxx.databases.neo4j.io
NEO4J_USERNAME=neo4j
NEO4J_PASSWORD='your_password'
NEO4J_DATABASE=noe4j
NEO4J_DATABASE=neo4j
NEO4J_MAX_CONNECTION_POOL_SIZE=100
NEO4J_CONNECTION_TIMEOUT=30
NEO4J_CONNECTION_ACQUISITION_TIMEOUT=30

View file

@ -1 +1 @@
__api_version__ = "0224"
__api_version__ = "0225"

View file

@ -45,6 +45,56 @@ def create_graph_routes(rag, api_key: Optional[str] = None):
status_code=500, detail=f"Error getting graph labels: {str(e)}"
)
@router.get("/graph/label/popular", dependencies=[Depends(combined_auth)])
async def get_popular_labels(
    limit: int = Query(
        300, description="Maximum number of popular labels to return", ge=1, le=1000
    ),
):
    """Return the most-connected entity labels, ranked by node degree.

    Args:
        limit: Upper bound on the number of labels returned
            (default 300; validation caps it at 1000).

    Returns:
        List[str]: Labels ordered from highest to lowest degree.

    Raises:
        HTTPException: 500 when the underlying graph storage fails.
    """
    try:
        # Delegate ranking to the graph storage layer.
        labels = await rag.chunk_entity_relation_graph.get_popular_labels(limit)
        return labels
    except Exception as e:
        # Log the failure with a full traceback, then surface a 500 to the client.
        logger.error(f"Error getting popular labels: {str(e)}")
        logger.error(traceback.format_exc())
        raise HTTPException(
            status_code=500, detail=f"Error getting popular labels: {str(e)}"
        )
@router.get("/graph/label/search", dependencies=[Depends(combined_auth)])
async def search_labels(
    q: str = Query(..., description="Search query string"),
    limit: int = Query(
        50, description="Maximum number of search results to return", ge=1, le=100
    ),
):
    """Fuzzy-search entity labels on the server side.

    Args:
        q: The search query string (required).
        limit: Upper bound on the number of matches returned
            (default 50; validation caps it at 100).

    Returns:
        List[str]: Matching labels ordered by relevance.

    Raises:
        HTTPException: 500 when the underlying graph storage fails.
    """
    try:
        # Delegate fuzzy matching to the graph storage layer.
        matches = await rag.chunk_entity_relation_graph.search_labels(q, limit)
        return matches
    except Exception as e:
        # Include the offending query in the log line for easier debugging,
        # but keep the client-facing detail message generic.
        logger.error(f"Error searching labels with query '{q}': {str(e)}")
        logger.error(traceback.format_exc())
        raise HTTPException(
            status_code=500, detail=f"Error searching labels: {str(e)}"
        )
@router.get("/graphs", dependencies=[Depends(combined_auth)])
async def get_knowledge_graph(
label: str = Query(..., description="Label to get knowledge graph for"),

View file

@ -1 +1 @@
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-DkG1DiI3.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-BOzHoVUU.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var 
f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};
import{e as o,c as l,g as b,k as O,h as P,j as p,l as w,m as c,n as v,t as A,o as N}from"./_baseUniq-25sRMAXf.js";import{a_ as g,aw as _,a$ as $,b0 as E,b1 as F,b2 as x,b3 as M,b4 as y,b5 as B,b6 as T}from"./mermaid-vendor-Btdx_ET5.js";var S=/\s/;function G(n){for(var r=n.length;r--&&S.test(n.charAt(r)););return r}var H=/^\s+/;function L(n){return n&&n.slice(0,G(n)+1).replace(H,"")}var m=NaN,R=/^[-+]0x[0-9a-f]+$/i,q=/^0b[01]+$/i,z=/^0o[0-7]+$/i,C=parseInt;function K(n){if(typeof n=="number")return n;if(o(n))return m;if(g(n)){var r=typeof n.valueOf=="function"?n.valueOf():n;n=g(r)?r+"":r}if(typeof n!="string")return n===0?n:+n;n=L(n);var t=q.test(n);return t||z.test(n)?C(n.slice(2),t?2:8):R.test(n)?m:+n}var W=1/0,X=17976931348623157e292;function Y(n){if(!n)return n===0?n:0;if(n=K(n),n===W||n===-1/0){var r=n<0?-1:1;return r*X}return n===n?n:0}function D(n){var r=Y(n),t=r%1;return r===r?t?r-t:r:0}function fn(n){var r=n==null?0:n.length;return r?l(n):[]}var I=Object.prototype,J=I.hasOwnProperty,dn=_(function(n,r){n=Object(n);var t=-1,e=r.length,i=e>2?r[2]:void 0;for(i&&$(r[0],r[1],i)&&(e=1);++t<e;)for(var f=r[t],a=E(f),s=-1,d=a.length;++s<d;){var u=a[s],h=n[u];(h===void 0||F(h,I[u])&&!J.call(n,u))&&(n[u]=f[u])}return n});function un(n){var r=n==null?0:n.length;return r?n[r-1]:void 0}function Q(n){return function(r,t,e){var i=Object(r);if(!x(r)){var f=b(t);r=O(r),t=function(s){return f(i[s],s,i)}}var a=n(r,t,e);return a>-1?i[f?r[a]:a]:void 0}}var U=Math.max;function Z(n,r,t){var e=n==null?0:n.length;if(!e)return-1;var i=t==null?0:D(t);return i<0&&(i=U(e+i,0)),P(n,b(r),i)}var hn=Q(Z);function V(n,r){var t=-1,e=x(n)?Array(n.length):[];return p(n,function(i,f,a){e[++t]=r(i,f,a)}),e}function gn(n,r){var t=M(n)?w:V;return t(n,b(r))}var j=Object.prototype,k=j.hasOwnProperty;function nn(n,r){return n!=null&&k.call(n,r)}function bn(n,r){return n!=null&&c(n,r,nn)}function rn(n,r){return n<r}function tn(n,r,t){for(var e=-1,i=n.length;++e<i;){var 
f=n[e],a=r(f);if(a!=null&&(s===void 0?a===a&&!o(a):t(a,s)))var s=a,d=f}return d}function mn(n){return n&&n.length?tn(n,y,rn):void 0}function an(n,r,t,e){if(!g(n))return n;r=v(r,n);for(var i=-1,f=r.length,a=f-1,s=n;s!=null&&++i<f;){var d=A(r[i]),u=t;if(d==="__proto__"||d==="constructor"||d==="prototype")return n;if(i!=a){var h=s[d];u=void 0,u===void 0&&(u=g(h)?h:B(r[i+1])?[]:{})}T(s,d,u),s=s[d]}return n}function on(n,r,t){for(var e=-1,i=r.length,f={};++e<i;){var a=r[e],s=N(n,a);t(s,a)&&an(f,v(a,n),s)}return f}export{rn as a,tn as b,V as c,on as d,mn as e,fn as f,hn as g,bn as h,dn as i,D as j,un as l,gn as m,Y as t};

File diff suppressed because one or more lines are too long

View file

@ -1 +1 @@
import{_ as l}from"./mermaid-vendor-BOzHoVUU.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};
import{_ as l}from"./mermaid-vendor-Btdx_ET5.js";function m(e,c){var i,t,o;e.accDescr&&((i=c.setAccDescription)==null||i.call(c,e.accDescr)),e.accTitle&&((t=c.setAccTitle)==null||t.call(c,e.accTitle)),e.title&&((o=c.setDiagramTitle)==null||o.call(c,e.title))}l(m,"populateCommonDb");export{m as p};

View file

@ -1 +1 @@
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-BOzHoVUU.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};
import{_ as n,a2 as x,j as l}from"./mermaid-vendor-Btdx_ET5.js";var c=n((a,t)=>{const e=a.append("rect");if(e.attr("x",t.x),e.attr("y",t.y),e.attr("fill",t.fill),e.attr("stroke",t.stroke),e.attr("width",t.width),e.attr("height",t.height),t.name&&e.attr("name",t.name),t.rx&&e.attr("rx",t.rx),t.ry&&e.attr("ry",t.ry),t.attrs!==void 0)for(const r in t.attrs)e.attr(r,t.attrs[r]);return t.class&&e.attr("class",t.class),e},"drawRect"),d=n((a,t)=>{const e={x:t.startx,y:t.starty,width:t.stopx-t.startx,height:t.stopy-t.starty,fill:t.fill,stroke:t.stroke,class:"rect"};c(a,e).lower()},"drawBackgroundRect"),g=n((a,t)=>{const e=t.text.replace(x," "),r=a.append("text");r.attr("x",t.x),r.attr("y",t.y),r.attr("class","legend"),r.style("text-anchor",t.anchor),t.class&&r.attr("class",t.class);const s=r.append("tspan");return s.attr("x",t.x+t.textMargin*2),s.text(e),r},"drawText"),h=n((a,t,e,r)=>{const s=a.append("image");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",i)},"drawImage"),m=n((a,t,e,r)=>{const s=a.append("use");s.attr("x",t),s.attr("y",e);const i=l.sanitizeUrl(r);s.attr("xlink:href",`#${i}`)},"drawEmbeddedImage"),y=n(()=>({x:0,y:0,width:100,height:100,fill:"#EDF2AE",stroke:"#666",anchor:"start",rx:0,ry:0}),"getNoteRect"),p=n(()=>({x:0,y:0,width:100,height:100,"text-anchor":"start",style:"#666",textMargin:0,rx:0,ry:0,tspan:!0}),"getTextObj");export{d as a,p as b,m as c,c as d,h as e,g as f,y as g};

View file

@ -1 +1 @@
import{_ as s}from"./mermaid-vendor-BOzHoVUU.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};
import{_ as s}from"./mermaid-vendor-Btdx_ET5.js";var t,e=(t=class{constructor(i){this.init=i,this.records=this.init()}reset(){this.records=this.init()}},s(t,"ImperativeState"),t);export{e as I};

View file

@ -1 +1 @@
import{_ as a,d as o}from"./mermaid-vendor-BOzHoVUU.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};
import{_ as a,d as o}from"./mermaid-vendor-Btdx_ET5.js";var d=a((t,e)=>{let n;return e==="sandbox"&&(n=o("#i"+t)),(e==="sandbox"?o(n.nodes()[0].contentDocument.body):o("body")).select(`[id="${t}"]`)},"getDiagramElement");export{d as g};

View file

@ -1,4 +1,4 @@
import{_ as e}from"./mermaid-vendor-BOzHoVUU.js";var l=e(()=>`
import{_ as e}from"./mermaid-vendor-Btdx_ET5.js";var l=e(()=>`
/* Font Awesome icon styling - consolidated */
.label-icon {
display: inline-block;

View file

@ -1 +1 @@
import{_ as a,e as w,l as x}from"./mermaid-vendor-BOzHoVUU.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};
import{_ as a,e as w,l as x}from"./mermaid-vendor-Btdx_ET5.js";var d=a((e,t,i,o)=>{e.attr("class",i);const{width:r,height:h,x:n,y:c}=u(e,t);w(e,h,r,o);const s=l(n,c,r,h,t);e.attr("viewBox",s),x.debug(`viewBox configured: ${s} with padding: ${t}`)},"setupViewPortForSVG"),u=a((e,t)=>{var o;const i=((o=e.node())==null?void 0:o.getBBox())||{width:0,height:0,x:0,y:0};return{width:i.width+t*2,height:i.height+t*2,x:i.x,y:i.y}},"calculateDimensionsWithPadding"),l=a((e,t,i,o,r)=>`${e-r} ${t-r} ${i} ${o}`,"createViewBox");export{d as s};

View file

@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-CiwMLJM4.js";import{_ as i}from"./mermaid-vendor-Btdx_ET5.js";import"./chunk-E2GYISFI-3iVC229r.js";import"./chunk-BFAMUDN2-BjxoQgRC.js";import"./chunk-SKB7J2MH-Cuc7MmmW.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};

View file

@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-CsKBHaWY.js";import{_ as i}from"./mermaid-vendor-BOzHoVUU.js";import"./chunk-E2GYISFI-Bj4t4I37.js";import"./chunk-BFAMUDN2-CmfJj-4x.js";import"./chunk-SKB7J2MH-Bl1qvyQL.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};

View file

@ -0,0 +1 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-CiwMLJM4.js";import{_ as i}from"./mermaid-vendor-Btdx_ET5.js";import"./chunk-E2GYISFI-3iVC229r.js";import"./chunk-BFAMUDN2-BjxoQgRC.js";import"./chunk-SKB7J2MH-Cuc7MmmW.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};

View file

@ -1 +0,0 @@
import{s as a,c as s,a as e,C as t}from"./chunk-SZ463SBG-CsKBHaWY.js";import{_ as i}from"./mermaid-vendor-BOzHoVUU.js";import"./chunk-E2GYISFI-Bj4t4I37.js";import"./chunk-BFAMUDN2-CmfJj-4x.js";import"./chunk-SKB7J2MH-Bl1qvyQL.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var c={parser:e,get db(){return new t},renderer:s,styles:a,init:i(r=>{r.class||(r.class={}),r.class.arrowMarkerAbsolute=r.arrowMarkerAbsolute},"init")};export{c as diagram};

View file

@ -1 +0,0 @@
import{b as r}from"./_baseUniq-DkG1DiI3.js";var e=4;function a(o){return r(o,e)}export{a as c};

View file

@ -0,0 +1 @@
import{b as r}from"./_baseUniq-25sRMAXf.js";var e=4;function a(o){return r(o,e)}export{a as c};

View file

@ -1,4 +1,4 @@
import{p as y}from"./chunk-353BL4L5-DshwP1ma.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-BOzHoVUU.js";import{p as N}from"./treemap-75Q7IDZK-BCGfM6IV.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-DkG1DiI3.js";import"./_basePickBy-BZlRisJu.js";import"./clone-CXHLa2LR.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. 
Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte 
start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
import{p as y}from"./chunk-353BL4L5-CjoW3Wzn.js";import{_ as l,s as B,g as S,t as z,q as F,a as P,b as E,F as v,K as W,e as T,z as D,G as _,H as A,l as w}from"./mermaid-vendor-Btdx_ET5.js";import{p as N}from"./treemap-75Q7IDZK-BEnUI63M.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-25sRMAXf.js";import"./_basePickBy-CvPSWTW1.js";import"./clone-C_6aNc6E.js";var x={packet:[]},m=structuredClone(x),L=A.packet,Y=l(()=>{const t=v({...L,..._().packet});return t.showBits&&(t.paddingY+=10),t},"getConfig"),G=l(()=>m.packet,"getPacket"),H=l(t=>{t.length>0&&m.packet.push(t)},"pushWord"),I=l(()=>{D(),m=structuredClone(x)},"clear"),u={pushWord:H,getPacket:G,getConfig:Y,clear:I,setAccTitle:E,getAccTitle:P,setDiagramTitle:F,getDiagramTitle:z,getAccDescription:S,setAccDescription:B},K=1e4,M=l(t=>{y(t,u);let e=-1,o=[],n=1;const{bitsPerRow:i}=u.getConfig();for(let{start:a,end:r,bits:c,label:f}of t.blocks){if(a!==void 0&&r!==void 0&&r<a)throw new Error(`Packet block ${a} - ${r} is invalid. End must be greater than start.`);if(a??(a=e+1),a!==e+1)throw new Error(`Packet block ${a} - ${r??a} is not contiguous. It should start from ${e+1}.`);if(c===0)throw new Error(`Packet block ${a} is invalid. 
Cannot have a zero bit field.`);for(r??(r=a+(c??1)-1),c??(c=r-a+1),e=r,w.debug(`Packet block ${a} - ${e} with label ${f}`);o.length<=i+1&&u.getPacket().length<K;){const[d,p]=O({start:a,end:r,bits:c,label:f},n,i);if(o.push(d),d.end+1===n*i&&(u.pushWord(o),o=[],n++),!p)break;({start:a,end:r,bits:c,label:f}=p)}}u.pushWord(o)},"populate"),O=l((t,e,o)=>{if(t.start===void 0)throw new Error("start should have been set during first phase");if(t.end===void 0)throw new Error("end should have been set during first phase");if(t.start>t.end)throw new Error(`Block start ${t.start} is greater than block end ${t.end}.`);if(t.end+1<=e*o)return[t,void 0];const n=e*o-1,i=e*o;return[{start:t.start,end:n,label:t.label,bits:n-t.start},{start:i,end:t.end,label:t.label,bits:t.end-i}]},"getNextFittingBlock"),q={parse:l(async t=>{const e=await N("packet",t);w.debug(e),M(e)},"parse")},R=l((t,e,o,n)=>{const i=n.db,a=i.getConfig(),{rowHeight:r,paddingY:c,bitWidth:f,bitsPerRow:d}=a,p=i.getPacket(),s=i.getDiagramTitle(),k=r+c,g=k*(p.length+1)-(s?0:r),b=f*d+2,h=W(e);h.attr("viewbox",`0 0 ${b} ${g}`),T(h,g,b,a.useMaxWidth);for(const[C,$]of p.entries())U(h,$,C,a);h.append("text").text(s).attr("x",b/2).attr("y",g-k/2).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("class","packetTitle")},"draw"),U=l((t,e,o,{rowHeight:n,paddingX:i,paddingY:a,bitWidth:r,bitsPerRow:c,showBits:f})=>{const d=t.append("g"),p=o*(n+a)+a;for(const s of e){const k=s.start%c*r+1,g=(s.end-s.start+1)*r-i;if(d.append("rect").attr("x",k).attr("y",p).attr("width",g).attr("height",n).attr("class","packetBlock"),d.append("text").attr("x",k+g/2).attr("y",p+n/2).attr("class","packetLabel").attr("dominant-baseline","middle").attr("text-anchor","middle").text(s.label),!f)continue;const b=s.end===s.start,h=p-2;d.append("text").attr("x",k+(b?g/2:0)).attr("y",h).attr("class","packetByte 
start").attr("dominant-baseline","auto").attr("text-anchor",b?"middle":"start").text(s.start),b||d.append("text").attr("x",k+g).attr("y",h).attr("class","packetByte end").attr("dominant-baseline","auto").attr("text-anchor","end").text(s.end)}},"drawWord"),X={draw:R},j={byteFontSize:"10px",startByteColor:"black",endByteColor:"black",labelColor:"black",labelFontSize:"12px",titleColor:"black",titleFontSize:"14px",blockStrokeColor:"black",blockStrokeWidth:"1",blockFillColor:"#efefef"},J=l(({packet:t}={})=>{const e=v(j,t);return`
.packetByte {
font-size: ${e.byteFontSize};
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1,4 +1,4 @@
import{g as q1}from"./chunk-E2GYISFI-Bj4t4I37.js";import{_ as m,o as O1,l as ee,c as be,d as Se,p as H1,r as X1,u as i1,b as Q1,s as J1,q as Z1,a as $1,g as et,t as tt,k as st,v as it,J as rt,x as nt,y as s1,z as at,A as ut,B as lt,C as ot}from"./mermaid-vendor-BOzHoVUU.js";import{g as ct}from"./chunk-BFAMUDN2-CmfJj-4x.js";import{s as ht}from"./chunk-SKB7J2MH-Bl1qvyQL.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var dt="flowchart-",Pe,pt=(Pe=class{constructor(){this.vertexCounter=0,this.config=be(),this.vertices=new Map,this.edges=[],this.classes=new Map,this.subGraphs=[],this.subGraphLookup=new Map,this.tooltips=new Map,this.subCount=0,this.firstGraphFlag=!0,this.secCount=-1,this.posCrossRef=[],this.funs=[],this.setAccTitle=Q1,this.setAccDescription=J1,this.setDiagramTitle=Z1,this.getAccTitle=$1,this.getAccDescription=et,this.getDiagramTitle=tt,this.funs.push(this.setupToolTips.bind(this)),this.addVertex=this.addVertex.bind(this),this.firstGraph=this.firstGraph.bind(this),this.setDirection=this.setDirection.bind(this),this.addSubGraph=this.addSubGraph.bind(this),this.addLink=this.addLink.bind(this),this.setLink=this.setLink.bind(this),this.updateLink=this.updateLink.bind(this),this.addClass=this.addClass.bind(this),this.setClass=this.setClass.bind(this),this.destructLink=this.destructLink.bind(this),this.setClickEvent=this.setClickEvent.bind(this),this.setTooltip=this.setTooltip.bind(this),this.updateLinkInterpolate=this.updateLinkInterpolate.bind(this),this.setClickFun=this.setClickFun.bind(this),this.bindFunctions=this.bindFunctions.bind(this),this.lex={firstGraph:this.firstGraph.bind(this)},this.clear(),this.setGen("gen-2")}sanitizeText(i){return st.sanitizeText(i,this.config)}lookUpDomId(i){for(const n of this.vertices.values())if(n.id===i)return n.domId;return i}addVertex(i,n,a,u,l,f,c={},A){var 
V,C;if(!i||i.trim().length===0)return;let r;if(A!==void 0){let p;A.includes(`
import{g as q1}from"./chunk-E2GYISFI-3iVC229r.js";import{_ as m,o as O1,l as ee,c as be,d as Se,p as H1,r as X1,u as i1,b as Q1,s as J1,q as Z1,a as $1,g as et,t as tt,k as st,v as it,J as rt,x as nt,y as s1,z as at,A as ut,B as lt,C as ot}from"./mermaid-vendor-Btdx_ET5.js";import{g as ct}from"./chunk-BFAMUDN2-BjxoQgRC.js";import{s as ht}from"./chunk-SKB7J2MH-Cuc7MmmW.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var dt="flowchart-",Pe,pt=(Pe=class{constructor(){this.vertexCounter=0,this.config=be(),this.vertices=new Map,this.edges=[],this.classes=new Map,this.subGraphs=[],this.subGraphLookup=new Map,this.tooltips=new Map,this.subCount=0,this.firstGraphFlag=!0,this.secCount=-1,this.posCrossRef=[],this.funs=[],this.setAccTitle=Q1,this.setAccDescription=J1,this.setDiagramTitle=Z1,this.getAccTitle=$1,this.getAccDescription=et,this.getDiagramTitle=tt,this.funs.push(this.setupToolTips.bind(this)),this.addVertex=this.addVertex.bind(this),this.firstGraph=this.firstGraph.bind(this),this.setDirection=this.setDirection.bind(this),this.addSubGraph=this.addSubGraph.bind(this),this.addLink=this.addLink.bind(this),this.setLink=this.setLink.bind(this),this.updateLink=this.updateLink.bind(this),this.addClass=this.addClass.bind(this),this.setClass=this.setClass.bind(this),this.destructLink=this.destructLink.bind(this),this.setClickEvent=this.setClickEvent.bind(this),this.setTooltip=this.setTooltip.bind(this),this.updateLinkInterpolate=this.updateLinkInterpolate.bind(this),this.setClickFun=this.setClickFun.bind(this),this.bindFunctions=this.bindFunctions.bind(this),this.lex={firstGraph:this.firstGraph.bind(this)},this.clear(),this.setGen("gen-2")}sanitizeText(i){return st.sanitizeText(i,this.config)}lookUpDomId(i){for(const n of this.vertices.values())if(n.id===i)return n.domId;return i}addVertex(i,n,a,u,l,f,c={},A){var 
V,C;if(!i||i.trim().length===0)return;let r;if(A!==void 0){let p;A.includes(`
`)?p=A+`
`:p=`{
`+A+`

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1,2 +1,2 @@
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-BOzHoVUU.js";import{p as m}from"./treemap-75Q7IDZK-BCGfM6IV.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-DkG1DiI3.js";import"./_basePickBy-BZlRisJu.js";import"./clone-CXHLa2LR.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
import{_ as e,l as o,K as i,e as n,L as p}from"./mermaid-vendor-Btdx_ET5.js";import{p as m}from"./treemap-75Q7IDZK-BEnUI63M.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-25sRMAXf.js";import"./_basePickBy-CvPSWTW1.js";import"./clone-C_6aNc6E.js";var g={parse:e(async r=>{const a=await m("info",r);o.debug(a)},"parse")},v={version:p.version+""},d=e(()=>v.version,"getVersion"),c={getVersion:d},l=e((r,a,s)=>{o.debug(`rendering info diagram
`+r);const t=i(a);n(t,100,400,!0),t.append("g").append("text").attr("x",100).attr("y",40).attr("class","version").attr("font-size",32).style("text-anchor","middle").text(`v${s}`)},"draw"),f={draw:l},L={parser:g,db:c,renderer:f};export{L as diagram};

View file

@ -1,4 +1,4 @@
import{a as gt,g as lt,f as mt,d as xt}from"./chunk-67H74DCK-D0z1MsVm.js";import{g as kt}from"./chunk-E2GYISFI-Bj4t4I37.js";import{_ as r,g as _t,s as bt,a as vt,b as wt,t as Tt,q as St,c as R,d as G,e as $t,z as Mt,N as et}from"./mermaid-vendor-BOzHoVUU.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var U=function(){var t=r(function(h,n,a,l){for(a=a||{},l=h.length;l--;a[h[l]]=n);return a},"o"),e=[6,8,10,11,12,14,16,17,18],s=[1,9],c=[1,10],i=[1,11],f=[1,12],u=[1,13],y=[1,14],g={trace:r(function(){},"trace"),yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,taskName:18,taskData:19,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",18:"taskName",19:"taskData"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,2]],performAction:r(function(n,a,l,d,p,o,v){var k=o.length-1;switch(p){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:d.setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),d.setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),d.setAccDescription(this.$);break;case 12:d.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 
13:d.addTask(o[k-1],o[k]),this.$="task";break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,7],{1:[2,1]}),t(e,[2,3]),{9:15,11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,5]),t(e,[2,6]),t(e,[2,8]),{13:[1,16]},{15:[1,17]},t(e,[2,11]),t(e,[2,12]),{19:[1,18]},t(e,[2,4]),t(e,[2,9]),t(e,[2,10]),t(e,[2,13])],defaultActions:{},parseError:r(function(n,a){if(a.recoverable)this.trace(n);else{var l=new Error(n);throw l.hash=a,l}},"parseError"),parse:r(function(n){var a=this,l=[0],d=[],p=[null],o=[],v=this.table,k="",C=0,K=0,dt=2,Q=1,yt=o.slice.call(arguments,1),_=Object.create(this.lexer),I={yy:{}};for(var O in this.yy)Object.prototype.hasOwnProperty.call(this.yy,O)&&(I.yy[O]=this.yy[O]);_.setInput(n,I.yy),I.yy.lexer=_,I.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var Y=_.yylloc;o.push(Y);var ft=_.options&&_.options.ranges;typeof I.yy.parseError=="function"?this.parseError=I.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pt(w){l.length=l.length-2*w,p.length=p.length-w,o.length=o.length-w}r(pt,"popStack");function D(){var w;return w=d.pop()||_.lex()||Q,typeof w!="number"&&(w instanceof Array&&(d=w,w=d.pop()),w=a.symbols_[w]||w),w}r(D,"lex");for(var b,A,T,q,F={},N,M,tt,z;;){if(A=l[l.length-1],this.defaultActions[A]?T=this.defaultActions[A]:((b===null||typeof b>"u")&&(b=D()),T=v[A]&&v[A][b]),typeof T>"u"||!T.length||!T[0]){var X="";z=[];for(N in v[A])this.terminals_[N]&&N>dt&&z.push("'"+this.terminals_[N]+"'");_.showPosition?X="Parse error on line "+(C+1)+`:
import{a as gt,g as lt,f as mt,d as xt}from"./chunk-67H74DCK-CnK7iSGJ.js";import{g as kt}from"./chunk-E2GYISFI-3iVC229r.js";import{_ as r,g as _t,s as bt,a as vt,b as wt,t as Tt,q as St,c as R,d as G,e as $t,z as Mt,N as et}from"./mermaid-vendor-Btdx_ET5.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var U=function(){var t=r(function(h,n,a,l){for(a=a||{},l=h.length;l--;a[h[l]]=n);return a},"o"),e=[6,8,10,11,12,14,16,17,18],s=[1,9],c=[1,10],i=[1,11],f=[1,12],u=[1,13],y=[1,14],g={trace:r(function(){},"trace"),yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,taskName:18,taskData:19,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",18:"taskName",19:"taskData"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,2]],performAction:r(function(n,a,l,d,p,o,v){var k=o.length-1;switch(p){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:d.setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),d.setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),d.setAccDescription(this.$);break;case 12:d.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 
13:d.addTask(o[k-1],o[k]),this.$="task";break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,7],{1:[2,1]}),t(e,[2,3]),{9:15,11:s,12:c,14:i,16:f,17:u,18:y},t(e,[2,5]),t(e,[2,6]),t(e,[2,8]),{13:[1,16]},{15:[1,17]},t(e,[2,11]),t(e,[2,12]),{19:[1,18]},t(e,[2,4]),t(e,[2,9]),t(e,[2,10]),t(e,[2,13])],defaultActions:{},parseError:r(function(n,a){if(a.recoverable)this.trace(n);else{var l=new Error(n);throw l.hash=a,l}},"parseError"),parse:r(function(n){var a=this,l=[0],d=[],p=[null],o=[],v=this.table,k="",C=0,K=0,dt=2,Q=1,yt=o.slice.call(arguments,1),_=Object.create(this.lexer),I={yy:{}};for(var O in this.yy)Object.prototype.hasOwnProperty.call(this.yy,O)&&(I.yy[O]=this.yy[O]);_.setInput(n,I.yy),I.yy.lexer=_,I.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var Y=_.yylloc;o.push(Y);var ft=_.options&&_.options.ranges;typeof I.yy.parseError=="function"?this.parseError=I.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pt(w){l.length=l.length-2*w,p.length=p.length-w,o.length=o.length-w}r(pt,"popStack");function D(){var w;return w=d.pop()||_.lex()||Q,typeof w!="number"&&(w instanceof Array&&(d=w,w=d.pop()),w=a.symbols_[w]||w),w}r(D,"lex");for(var b,A,T,q,F={},N,M,tt,z;;){if(A=l[l.length-1],this.defaultActions[A]?T=this.defaultActions[A]:((b===null||typeof b>"u")&&(b=D()),T=v[A]&&v[A][b]),typeof T>"u"||!T.length||!T[0]){var X="";z=[];for(N in v[A])this.terminals_[N]&&N>dt&&z.push("'"+this.terminals_[N]+"'");_.showPosition?X="Parse error on line "+(C+1)+`:
`+_.showPosition()+`
Expecting `+z.join(", ")+", got '"+(this.terminals_[b]||b)+"'":X="Parse error on line "+(C+1)+": Unexpected "+(b==Q?"end of input":"'"+(this.terminals_[b]||b)+"'"),this.parseError(X,{text:_.match,token:this.terminals_[b]||b,line:_.yylineno,loc:Y,expected:z})}if(T[0]instanceof Array&&T.length>1)throw new Error("Parse Error: multiple actions possible at state: "+A+", token: "+b);switch(T[0]){case 1:l.push(b),p.push(_.yytext),o.push(_.yylloc),l.push(T[1]),b=null,K=_.yyleng,k=_.yytext,C=_.yylineno,Y=_.yylloc;break;case 2:if(M=this.productions_[T[1]][1],F.$=p[p.length-M],F._$={first_line:o[o.length-(M||1)].first_line,last_line:o[o.length-1].last_line,first_column:o[o.length-(M||1)].first_column,last_column:o[o.length-1].last_column},ft&&(F._$.range=[o[o.length-(M||1)].range[0],o[o.length-1].range[1]]),q=this.performAction.apply(F,[k,K,C,I.yy,T[1],p,o].concat(yt)),typeof q<"u")return q;M&&(l=l.slice(0,-1*M*2),p=p.slice(0,-1*M),o=o.slice(0,-1*M)),l.push(this.productions_[T[1]][0]),p.push(F.$),o.push(F._$),tt=v[l[l.length-2]][l[l.length-1]],l.push(tt);break;case 3:return!0}}return!0},"parse")},m=function(){var h={EOF:1,parseError:r(function(a,l){if(this.yy.parser)this.yy.parser.parseError(a,l);else throw new Error(a)},"parseError"),setInput:r(function(n,a){return this.yy=a||this.yy||{},this._input=n,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:r(function(){var n=this._input[0];this.yytext+=n,this.yyleng++,this.offset++,this.match+=n,this.matched+=n;var a=n.match(/(?:\r\n?|\n).*/g);return a?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),n},"input"),unput:r(function(n){var 
a=n.length,l=n.split(/(?:\r\n?|\n)/g);this._input=n+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-a),this.offset-=a;var d=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),l.length-1&&(this.yylineno-=l.length-1);var p=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:l?(l.length===d.length?this.yylloc.first_column:0)+d[d.length-l.length].length-l[0].length:this.yylloc.first_column-a},this.options.ranges&&(this.yylloc.range=[p[0],p[0]+this.yyleng-a]),this.yyleng=this.yytext.length,this},"unput"),more:r(function(){return this._more=!0,this},"more"),reject:r(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:r(function(n){this.unput(this.match.slice(n))},"less"),pastInput:r(function(){var n=this.matched.substr(0,this.matched.length-this.match.length);return(n.length>20?"...":"")+n.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:r(function(){var n=this.match;return n.length<20&&(n+=this._input.substr(0,20-n.length)),(n.substr(0,20)+(n.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:r(function(){var n=this.pastInput(),a=new Array(n.length+1).join("-");return n+this.upcomingInput()+`

View file

@ -1,4 +1,4 @@
import{g as fe}from"./chunk-E2GYISFI-Bj4t4I37.js";import{_ as c,l as te,c as W,K as ye,a8 as be,a9 as me,aa as _e,a3 as Ee,H as Y,i as G,v as ke,J as Se,a4 as Ne,a5 as le,a6 as ce}from"./mermaid-vendor-BOzHoVUU.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var $=function(){var t=c(function(_,s,n,a){for(n=n||{},a=_.length;a--;n[_[a]]=s);return n},"o"),g=[1,4],d=[1,13],r=[1,12],p=[1,15],E=[1,16],f=[1,20],h=[1,19],L=[6,7,8],C=[1,26],w=[1,24],N=[1,25],i=[6,7,11],H=[1,31],x=[6,7,11,24],P=[1,6,13,16,17,20,23],M=[1,35],U=[1,36],A=[1,6,7,11,13,16,17,20,23],j=[1,38],V={trace:c(function(){},"trace"),yy:{},symbols_:{error:2,start:3,mindMap:4,spaceLines:5,SPACELINE:6,NL:7,KANBAN:8,document:9,stop:10,EOF:11,statement:12,SPACELIST:13,node:14,shapeData:15,ICON:16,CLASS:17,nodeWithId:18,nodeWithoutId:19,NODE_DSTART:20,NODE_DESCR:21,NODE_DEND:22,NODE_ID:23,SHAPE_DATA:24,$accept:0,$end:1},terminals_:{2:"error",6:"SPACELINE",7:"NL",8:"KANBAN",11:"EOF",13:"SPACELIST",16:"ICON",17:"CLASS",20:"NODE_DSTART",21:"NODE_DESCR",22:"NODE_DEND",23:"NODE_ID",24:"SHAPE_DATA"},productions_:[0,[3,1],[3,2],[5,1],[5,2],[5,2],[4,2],[4,3],[10,1],[10,1],[10,1],[10,2],[10,2],[9,3],[9,2],[12,3],[12,2],[12,2],[12,2],[12,1],[12,2],[12,1],[12,1],[12,1],[12,1],[14,1],[14,1],[19,3],[18,1],[18,4],[15,2],[15,1]],performAction:c(function(s,n,a,o,u,e,B){var l=e.length-1;switch(u){case 6:case 7:return o;case 8:o.getLogger().trace("Stop NL ");break;case 9:o.getLogger().trace("Stop EOF ");break;case 11:o.getLogger().trace("Stop NL2 ");break;case 12:o.getLogger().trace("Stop EOF2 ");break;case 15:o.getLogger().info("Node: ",e[l-1].id),o.addNode(e[l-2].length,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 16:o.getLogger().info("Node: ",e[l].id),o.addNode(e[l-1].length,e[l].id,e[l].descr,e[l].type);break;case 17:o.getLogger().trace("Icon: ",e[l]),o.decorateNode({icon:e[l]});break;case 
18:case 23:o.decorateNode({class:e[l]});break;case 19:o.getLogger().trace("SPACELIST");break;case 20:o.getLogger().trace("Node: ",e[l-1].id),o.addNode(0,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 21:o.getLogger().trace("Node: ",e[l].id),o.addNode(0,e[l].id,e[l].descr,e[l].type);break;case 22:o.decorateNode({icon:e[l]});break;case 27:o.getLogger().trace("node found ..",e[l-2]),this.$={id:e[l-1],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 28:this.$={id:e[l],descr:e[l],type:0};break;case 29:o.getLogger().trace("node found ..",e[l-3]),this.$={id:e[l-3],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 30:this.$=e[l-1]+e[l];break;case 31:this.$=e[l];break}},"anonymous"),table:[{3:1,4:2,5:3,6:[1,5],8:g},{1:[3]},{1:[2,1]},{4:6,6:[1,7],7:[1,8],8:g},{6:d,7:[1,10],9:9,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(L,[2,3]),{1:[2,2]},t(L,[2,4]),t(L,[2,5]),{1:[2,6],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:d,9:22,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:C,7:w,10:23,11:N},t(i,[2,24],{18:17,19:18,14:27,16:[1,28],17:[1,29],20:f,23:h}),t(i,[2,19]),t(i,[2,21],{15:30,24:H}),t(i,[2,22]),t(i,[2,23]),t(x,[2,25]),t(x,[2,26]),t(x,[2,28],{20:[1,32]}),{21:[1,33]},{6:C,7:w,10:34,11:N},{1:[2,7],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(P,[2,14],{7:M,11:U}),t(A,[2,8]),t(A,[2,9]),t(A,[2,10]),t(i,[2,16],{15:37,24:H}),t(i,[2,17]),t(i,[2,18]),t(i,[2,20],{24:j}),t(x,[2,31]),{21:[1,39]},{22:[1,40]},t(P,[2,13],{7:M,11:U}),t(A,[2,11]),t(A,[2,12]),t(i,[2,15],{24:j}),t(x,[2,30]),{22:[1,41]},t(x,[2,27]),t(x,[2,29])],defaultActions:{2:[2,1],6:[2,2]},parseError:c(function(s,n){if(n.recoverable)this.trace(s);else{var a=new Error(s);throw a.hash=n,a}},"parseError"),parse:c(function(s){var n=this,a=[0],o=[],u=[null],e=[],B=this.table,l="",z=0,ie=0,ue=2,re=1,ge=e.slice.call(arguments,1),b=Object.create(this.lexer),T={yy:{}};for(var J in 
this.yy)Object.prototype.hasOwnProperty.call(this.yy,J)&&(T.yy[J]=this.yy[J]);b.setInput(s,T.yy),T.yy.lexer=b,T.yy.parser=this,typeof b.yylloc>"u"&&(b.yylloc={});var q=b.yylloc;e.push(q);var de=b.options&&b.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pe(S){a.length=a.length-2*S,u.length=u.length-S,e.length=e.length-S}c(pe,"popStack");function ae(){var S;return S=o.pop()||b.lex()||re,typeof S!="number"&&(S instanceof Array&&(o=S,S=o.pop()),S=n.symbols_[S]||S),S}c(ae,"lex");for(var k,R,v,Q,F={},K,I,oe,X;;){if(R=a[a.length-1],this.defaultActions[R]?v=this.defaultActions[R]:((k===null||typeof k>"u")&&(k=ae()),v=B[R]&&B[R][k]),typeof v>"u"||!v.length||!v[0]){var Z="";X=[];for(K in B[R])this.terminals_[K]&&K>ue&&X.push("'"+this.terminals_[K]+"'");b.showPosition?Z="Parse error on line "+(z+1)+`:
import{g as fe}from"./chunk-E2GYISFI-3iVC229r.js";import{_ as c,l as te,c as W,K as ye,a8 as be,a9 as me,aa as _e,a3 as Ee,H as Y,i as G,v as ke,J as Se,a4 as Ne,a5 as le,a6 as ce}from"./mermaid-vendor-Btdx_ET5.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var $=function(){var t=c(function(_,s,n,a){for(n=n||{},a=_.length;a--;n[_[a]]=s);return n},"o"),g=[1,4],d=[1,13],r=[1,12],p=[1,15],E=[1,16],f=[1,20],h=[1,19],L=[6,7,8],C=[1,26],w=[1,24],N=[1,25],i=[6,7,11],H=[1,31],x=[6,7,11,24],P=[1,6,13,16,17,20,23],M=[1,35],U=[1,36],A=[1,6,7,11,13,16,17,20,23],j=[1,38],V={trace:c(function(){},"trace"),yy:{},symbols_:{error:2,start:3,mindMap:4,spaceLines:5,SPACELINE:6,NL:7,KANBAN:8,document:9,stop:10,EOF:11,statement:12,SPACELIST:13,node:14,shapeData:15,ICON:16,CLASS:17,nodeWithId:18,nodeWithoutId:19,NODE_DSTART:20,NODE_DESCR:21,NODE_DEND:22,NODE_ID:23,SHAPE_DATA:24,$accept:0,$end:1},terminals_:{2:"error",6:"SPACELINE",7:"NL",8:"KANBAN",11:"EOF",13:"SPACELIST",16:"ICON",17:"CLASS",20:"NODE_DSTART",21:"NODE_DESCR",22:"NODE_DEND",23:"NODE_ID",24:"SHAPE_DATA"},productions_:[0,[3,1],[3,2],[5,1],[5,2],[5,2],[4,2],[4,3],[10,1],[10,1],[10,1],[10,2],[10,2],[9,3],[9,2],[12,3],[12,2],[12,2],[12,2],[12,1],[12,2],[12,1],[12,1],[12,1],[12,1],[14,1],[14,1],[19,3],[18,1],[18,4],[15,2],[15,1]],performAction:c(function(s,n,a,o,u,e,B){var l=e.length-1;switch(u){case 6:case 7:return o;case 8:o.getLogger().trace("Stop NL ");break;case 9:o.getLogger().trace("Stop EOF ");break;case 11:o.getLogger().trace("Stop NL2 ");break;case 12:o.getLogger().trace("Stop EOF2 ");break;case 15:o.getLogger().info("Node: ",e[l-1].id),o.addNode(e[l-2].length,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 16:o.getLogger().info("Node: ",e[l].id),o.addNode(e[l-1].length,e[l].id,e[l].descr,e[l].type);break;case 17:o.getLogger().trace("Icon: ",e[l]),o.decorateNode({icon:e[l]});break;case 
18:case 23:o.decorateNode({class:e[l]});break;case 19:o.getLogger().trace("SPACELIST");break;case 20:o.getLogger().trace("Node: ",e[l-1].id),o.addNode(0,e[l-1].id,e[l-1].descr,e[l-1].type,e[l]);break;case 21:o.getLogger().trace("Node: ",e[l].id),o.addNode(0,e[l].id,e[l].descr,e[l].type);break;case 22:o.decorateNode({icon:e[l]});break;case 27:o.getLogger().trace("node found ..",e[l-2]),this.$={id:e[l-1],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 28:this.$={id:e[l],descr:e[l],type:0};break;case 29:o.getLogger().trace("node found ..",e[l-3]),this.$={id:e[l-3],descr:e[l-1],type:o.getType(e[l-2],e[l])};break;case 30:this.$=e[l-1]+e[l];break;case 31:this.$=e[l];break}},"anonymous"),table:[{3:1,4:2,5:3,6:[1,5],8:g},{1:[3]},{1:[2,1]},{4:6,6:[1,7],7:[1,8],8:g},{6:d,7:[1,10],9:9,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(L,[2,3]),{1:[2,2]},t(L,[2,4]),t(L,[2,5]),{1:[2,6],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:d,9:22,12:11,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},{6:C,7:w,10:23,11:N},t(i,[2,24],{18:17,19:18,14:27,16:[1,28],17:[1,29],20:f,23:h}),t(i,[2,19]),t(i,[2,21],{15:30,24:H}),t(i,[2,22]),t(i,[2,23]),t(x,[2,25]),t(x,[2,26]),t(x,[2,28],{20:[1,32]}),{21:[1,33]},{6:C,7:w,10:34,11:N},{1:[2,7],6:d,12:21,13:r,14:14,16:p,17:E,18:17,19:18,20:f,23:h},t(P,[2,14],{7:M,11:U}),t(A,[2,8]),t(A,[2,9]),t(A,[2,10]),t(i,[2,16],{15:37,24:H}),t(i,[2,17]),t(i,[2,18]),t(i,[2,20],{24:j}),t(x,[2,31]),{21:[1,39]},{22:[1,40]},t(P,[2,13],{7:M,11:U}),t(A,[2,11]),t(A,[2,12]),t(i,[2,15],{24:j}),t(x,[2,30]),{22:[1,41]},t(x,[2,27]),t(x,[2,29])],defaultActions:{2:[2,1],6:[2,2]},parseError:c(function(s,n){if(n.recoverable)this.trace(s);else{var a=new Error(s);throw a.hash=n,a}},"parseError"),parse:c(function(s){var n=this,a=[0],o=[],u=[null],e=[],B=this.table,l="",z=0,ie=0,ue=2,re=1,ge=e.slice.call(arguments,1),b=Object.create(this.lexer),T={yy:{}};for(var J in 
this.yy)Object.prototype.hasOwnProperty.call(this.yy,J)&&(T.yy[J]=this.yy[J]);b.setInput(s,T.yy),T.yy.lexer=b,T.yy.parser=this,typeof b.yylloc>"u"&&(b.yylloc={});var q=b.yylloc;e.push(q);var de=b.options&&b.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function pe(S){a.length=a.length-2*S,u.length=u.length-S,e.length=e.length-S}c(pe,"popStack");function ae(){var S;return S=o.pop()||b.lex()||re,typeof S!="number"&&(S instanceof Array&&(o=S,S=o.pop()),S=n.symbols_[S]||S),S}c(ae,"lex");for(var k,R,v,Q,F={},K,I,oe,X;;){if(R=a[a.length-1],this.defaultActions[R]?v=this.defaultActions[R]:((k===null||typeof k>"u")&&(k=ae()),v=B[R]&&B[R][k]),typeof v>"u"||!v.length||!v[0]){var Z="";X=[];for(K in B[R])this.terminals_[K]&&K>ue&&X.push("'"+this.terminals_[K]+"'");b.showPosition?Z="Parse error on line "+(z+1)+`:
`+b.showPosition()+`
Expecting `+X.join(", ")+", got '"+(this.terminals_[k]||k)+"'":Z="Parse error on line "+(z+1)+": Unexpected "+(k==re?"end of input":"'"+(this.terminals_[k]||k)+"'"),this.parseError(Z,{text:b.match,token:this.terminals_[k]||k,line:b.yylineno,loc:q,expected:X})}if(v[0]instanceof Array&&v.length>1)throw new Error("Parse Error: multiple actions possible at state: "+R+", token: "+k);switch(v[0]){case 1:a.push(k),u.push(b.yytext),e.push(b.yylloc),a.push(v[1]),k=null,ie=b.yyleng,l=b.yytext,z=b.yylineno,q=b.yylloc;break;case 2:if(I=this.productions_[v[1]][1],F.$=u[u.length-I],F._$={first_line:e[e.length-(I||1)].first_line,last_line:e[e.length-1].last_line,first_column:e[e.length-(I||1)].first_column,last_column:e[e.length-1].last_column},de&&(F._$.range=[e[e.length-(I||1)].range[0],e[e.length-1].range[1]]),Q=this.performAction.apply(F,[l,ie,z,T.yy,v[1],u,e].concat(ge)),typeof Q<"u")return Q;I&&(a=a.slice(0,-1*I*2),u=u.slice(0,-1*I),e=e.slice(0,-1*I)),a.push(this.productions_[v[1]][0]),u.push(F.$),e.push(F._$),oe=B[a[a.length-2]][a[a.length-1]],a.push(oe);break;case 3:return!0}}return!0},"parse")},m=function(){var _={EOF:1,parseError:c(function(n,a){if(this.yy.parser)this.yy.parser.parseError(n,a);else throw new Error(n)},"parseError"),setInput:c(function(s,n){return this.yy=n||this.yy||{},this._input=s,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:c(function(){var s=this._input[0];this.yytext+=s,this.yyleng++,this.offset++,this.match+=s,this.matched+=s;var n=s.match(/(?:\r\n?|\n).*/g);return n?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),s},"input"),unput:c(function(s){var 
n=s.length,a=s.split(/(?:\r\n?|\n)/g);this._input=s+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-n),this.offset-=n;var o=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),a.length-1&&(this.yylineno-=a.length-1);var u=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:a?(a.length===o.length?this.yylloc.first_column:0)+o[o.length-a.length].length-a[0].length:this.yylloc.first_column-n},this.options.ranges&&(this.yylloc.range=[u[0],u[0]+this.yyleng-n]),this.yyleng=this.yytext.length,this},"unput"),more:c(function(){return this._more=!0,this},"more"),reject:c(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:c(function(s){this.unput(this.match.slice(s))},"less"),pastInput:c(function(){var s=this.matched.substr(0,this.matched.length-this.match.length);return(s.length>20?"...":"")+s.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:c(function(){var s=this.match;return s.length<20&&(s+=this._input.substr(0,20-s.length)),(s.substr(0,20)+(s.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:c(function(){var s=this.pastInput(),n=new Array(s.length+1).join("-");return s+this.upcomingInput()+`

File diff suppressed because one or more lines are too long

View file

@ -1,4 +1,4 @@
import{p as N}from"./chunk-353BL4L5-DshwP1ma.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-BOzHoVUU.js";import{p as rt}from"./treemap-75Q7IDZK-BCGfM6IV.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-DkG1DiI3.js";import"./_basePickBy-BZlRisJu.js";import"./clone-CXHLa2LR.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
import{p as N}from"./chunk-353BL4L5-CjoW3Wzn.js";import{_ as i,g as B,s as U,a as q,b as H,t as K,q as V,l as C,c as Z,F as j,K as J,M as Q,N as z,O as X,e as Y,z as tt,P as et,H as at}from"./mermaid-vendor-Btdx_ET5.js";import{p as rt}from"./treemap-75Q7IDZK-BEnUI63M.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";import"./_baseUniq-25sRMAXf.js";import"./_basePickBy-CvPSWTW1.js";import"./clone-C_6aNc6E.js";var it=at.pie,D={sections:new Map,showData:!1},f=D.sections,w=D.showData,st=structuredClone(it),ot=i(()=>structuredClone(st),"getConfig"),nt=i(()=>{f=new Map,w=D.showData,tt()},"clear"),lt=i(({label:t,value:a})=>{f.has(t)||(f.set(t,a),C.debug(`added new section: ${t}, with value: ${a}`))},"addSection"),ct=i(()=>f,"getSections"),pt=i(t=>{w=t},"setShowData"),dt=i(()=>w,"getShowData"),F={getConfig:ot,clear:nt,setDiagramTitle:V,getDiagramTitle:K,setAccTitle:H,getAccTitle:q,setAccDescription:U,getAccDescription:B,addSection:lt,getSections:ct,setShowData:pt,getShowData:dt},gt=i((t,a)=>{N(t,a),a.setShowData(t.showData),t.sections.map(a.addSection)},"populateDb"),ut={parse:i(async t=>{const a=await rt("pie",t);C.debug(a),gt(a,F)},"parse")},mt=i(t=>`
.pieCircle{
stroke: ${t.pieStrokeColor};
stroke-width : ${t.pieStrokeWidth};

View file

@ -1 +1 @@
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-Cv4eoNWH.js";import{_ as s}from"./mermaid-vendor-BOzHoVUU.js";import"./chunk-BFAMUDN2-CmfJj-4x.js";import"./chunk-SKB7J2MH-Bl1qvyQL.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};
import{s as r,b as e,a,S as i}from"./chunk-OW32GOEJ-ZlCav04U.js";import{_ as s}from"./mermaid-vendor-Btdx_ET5.js";import"./chunk-BFAMUDN2-BjxoQgRC.js";import"./chunk-SKB7J2MH-Cuc7MmmW.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var f={parser:a,get db(){return new i(2)},renderer:e,styles:r,init:s(t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute},"init")};export{f as diagram};

View file

@ -1,4 +1,4 @@
import{_ as s,c as xt,l as E,d as q,a3 as kt,a4 as _t,a5 as bt,a6 as vt,N as nt,D as wt,a7 as St,z as Et}from"./mermaid-vendor-BOzHoVUU.js";import"./feature-graph-bahMe5Gt.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var X=function(){var n=s(function(f,r,a,h){for(a=a||{},h=f.length;h--;a[f[h]]=r);return a},"o"),t=[6,8,10,11,12,14,16,17,20,21],e=[1,9],l=[1,10],i=[1,11],d=[1,12],c=[1,13],g=[1,16],m=[1,17],p={trace:s(function(){},"trace"),yy:{},symbols_:{error:2,start:3,timeline:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,period_statement:18,event_statement:19,period:20,event:21,$accept:0,$end:1},terminals_:{2:"error",4:"timeline",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",20:"period",21:"event"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,1],[9,1],[18,1],[19,1]],performAction:s(function(r,a,h,u,y,o,S){var k=o.length-1;switch(y){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:u.getCommonDb().setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),u.getCommonDb().setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),u.getCommonDb().setAccDescription(this.$);break;case 12:u.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 15:u.addTask(o[k],0,""),this.$=o[k];break;case 
16:u.addEvent(o[k].substr(2)),this.$=o[k];break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},n(t,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,7],{1:[2,1]}),n(t,[2,3]),{9:18,11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,5]),n(t,[2,6]),n(t,[2,8]),{13:[1,19]},{15:[1,20]},n(t,[2,11]),n(t,[2,12]),n(t,[2,13]),n(t,[2,14]),n(t,[2,15]),n(t,[2,16]),n(t,[2,4]),n(t,[2,9]),n(t,[2,10])],defaultActions:{},parseError:s(function(r,a){if(a.recoverable)this.trace(r);else{var h=new Error(r);throw h.hash=a,h}},"parseError"),parse:s(function(r){var a=this,h=[0],u=[],y=[null],o=[],S=this.table,k="",M=0,C=0,B=2,J=1,O=o.slice.call(arguments,1),_=Object.create(this.lexer),N={yy:{}};for(var L in this.yy)Object.prototype.hasOwnProperty.call(this.yy,L)&&(N.yy[L]=this.yy[L]);_.setInput(r,N.yy),N.yy.lexer=_,N.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var v=_.yylloc;o.push(v);var $=_.options&&_.options.ranges;typeof N.yy.parseError=="function"?this.parseError=N.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(T){h.length=h.length-2*T,y.length=y.length-T,o.length=o.length-T}s(R,"popStack");function A(){var T;return T=u.pop()||_.lex()||J,typeof T!="number"&&(T instanceof Array&&(u=T,T=u.pop()),T=a.symbols_[T]||T),T}s(A,"lex");for(var w,H,I,K,F={},j,P,et,G;;){if(H=h[h.length-1],this.defaultActions[H]?I=this.defaultActions[H]:((w===null||typeof w>"u")&&(w=A()),I=S[H]&&S[H][w]),typeof I>"u"||!I.length||!I[0]){var Q="";G=[];for(j in S[H])this.terminals_[j]&&j>B&&G.push("'"+this.terminals_[j]+"'");_.showPosition?Q="Parse error on line "+(M+1)+`:
import{_ as s,c as xt,l as E,d as q,a3 as kt,a4 as _t,a5 as bt,a6 as vt,N as nt,D as wt,a7 as St,z as Et}from"./mermaid-vendor-Btdx_ET5.js";import"./feature-graph-B2JqR-0F.js";import"./react-vendor-DEwriMA6.js";import"./graph-vendor-B-X5JegA.js";import"./ui-vendor-CeCm8EER.js";import"./utils-vendor-BysuhMZA.js";var X=function(){var n=s(function(f,r,a,h){for(a=a||{},h=f.length;h--;a[f[h]]=r);return a},"o"),t=[6,8,10,11,12,14,16,17,20,21],e=[1,9],l=[1,10],i=[1,11],d=[1,12],c=[1,13],g=[1,16],m=[1,17],p={trace:s(function(){},"trace"),yy:{},symbols_:{error:2,start:3,timeline:4,document:5,EOF:6,line:7,SPACE:8,statement:9,NEWLINE:10,title:11,acc_title:12,acc_title_value:13,acc_descr:14,acc_descr_value:15,acc_descr_multiline_value:16,section:17,period_statement:18,event_statement:19,period:20,event:21,$accept:0,$end:1},terminals_:{2:"error",4:"timeline",6:"EOF",8:"SPACE",10:"NEWLINE",11:"title",12:"acc_title",13:"acc_title_value",14:"acc_descr",15:"acc_descr_value",16:"acc_descr_multiline_value",17:"section",20:"period",21:"event"},productions_:[0,[3,3],[5,0],[5,2],[7,2],[7,1],[7,1],[7,1],[9,1],[9,2],[9,2],[9,1],[9,1],[9,1],[9,1],[18,1],[19,1]],performAction:s(function(r,a,h,u,y,o,S){var k=o.length-1;switch(y){case 1:return o[k-1];case 2:this.$=[];break;case 3:o[k-1].push(o[k]),this.$=o[k-1];break;case 4:case 5:this.$=o[k];break;case 6:case 7:this.$=[];break;case 8:u.getCommonDb().setDiagramTitle(o[k].substr(6)),this.$=o[k].substr(6);break;case 9:this.$=o[k].trim(),u.getCommonDb().setAccTitle(this.$);break;case 10:case 11:this.$=o[k].trim(),u.getCommonDb().setAccDescription(this.$);break;case 12:u.addSection(o[k].substr(8)),this.$=o[k].substr(8);break;case 15:u.addTask(o[k],0,""),this.$=o[k];break;case 
16:u.addEvent(o[k].substr(2)),this.$=o[k];break}},"anonymous"),table:[{3:1,4:[1,2]},{1:[3]},n(t,[2,2],{5:3}),{6:[1,4],7:5,8:[1,6],9:7,10:[1,8],11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,7],{1:[2,1]}),n(t,[2,3]),{9:18,11:e,12:l,14:i,16:d,17:c,18:14,19:15,20:g,21:m},n(t,[2,5]),n(t,[2,6]),n(t,[2,8]),{13:[1,19]},{15:[1,20]},n(t,[2,11]),n(t,[2,12]),n(t,[2,13]),n(t,[2,14]),n(t,[2,15]),n(t,[2,16]),n(t,[2,4]),n(t,[2,9]),n(t,[2,10])],defaultActions:{},parseError:s(function(r,a){if(a.recoverable)this.trace(r);else{var h=new Error(r);throw h.hash=a,h}},"parseError"),parse:s(function(r){var a=this,h=[0],u=[],y=[null],o=[],S=this.table,k="",M=0,C=0,B=2,J=1,O=o.slice.call(arguments,1),_=Object.create(this.lexer),N={yy:{}};for(var L in this.yy)Object.prototype.hasOwnProperty.call(this.yy,L)&&(N.yy[L]=this.yy[L]);_.setInput(r,N.yy),N.yy.lexer=_,N.yy.parser=this,typeof _.yylloc>"u"&&(_.yylloc={});var v=_.yylloc;o.push(v);var $=_.options&&_.options.ranges;typeof N.yy.parseError=="function"?this.parseError=N.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(T){h.length=h.length-2*T,y.length=y.length-T,o.length=o.length-T}s(R,"popStack");function A(){var T;return T=u.pop()||_.lex()||J,typeof T!="number"&&(T instanceof Array&&(u=T,T=u.pop()),T=a.symbols_[T]||T),T}s(A,"lex");for(var w,H,I,K,F={},j,P,et,G;;){if(H=h[h.length-1],this.defaultActions[H]?I=this.defaultActions[H]:((w===null||typeof w>"u")&&(w=A()),I=S[H]&&S[H][w]),typeof I>"u"||!I.length||!I[0]){var Q="";G=[];for(j in S[H])this.terminals_[j]&&j>B&&G.push("'"+this.terminals_[j]+"'");_.showPosition?Q="Parse error on line "+(M+1)+`:
`+_.showPosition()+`
Expecting `+G.join(", ")+", got '"+(this.terminals_[w]||w)+"'":Q="Parse error on line "+(M+1)+": Unexpected "+(w==J?"end of input":"'"+(this.terminals_[w]||w)+"'"),this.parseError(Q,{text:_.match,token:this.terminals_[w]||w,line:_.yylineno,loc:v,expected:G})}if(I[0]instanceof Array&&I.length>1)throw new Error("Parse Error: multiple actions possible at state: "+H+", token: "+w);switch(I[0]){case 1:h.push(w),y.push(_.yytext),o.push(_.yylloc),h.push(I[1]),w=null,C=_.yyleng,k=_.yytext,M=_.yylineno,v=_.yylloc;break;case 2:if(P=this.productions_[I[1]][1],F.$=y[y.length-P],F._$={first_line:o[o.length-(P||1)].first_line,last_line:o[o.length-1].last_line,first_column:o[o.length-(P||1)].first_column,last_column:o[o.length-1].last_column},$&&(F._$.range=[o[o.length-(P||1)].range[0],o[o.length-1].range[1]]),K=this.performAction.apply(F,[k,C,M,N.yy,I[1],y,o].concat(O)),typeof K<"u")return K;P&&(h=h.slice(0,-1*P*2),y=y.slice(0,-1*P),o=o.slice(0,-1*P)),h.push(this.productions_[I[1]][0]),y.push(F.$),o.push(F._$),et=S[h[h.length-2]][h[h.length-1]],h.push(et);break;case 3:return!0}}return!0},"parse")},x=function(){var f={EOF:1,parseError:s(function(a,h){if(this.yy.parser)this.yy.parser.parseError(a,h);else throw new Error(a)},"parseError"),setInput:s(function(r,a){return this.yy=a||this.yy||{},this._input=r,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},"setInput"),input:s(function(){var r=this._input[0];this.yytext+=r,this.yyleng++,this.offset++,this.match+=r,this.matched+=r;var a=r.match(/(?:\r\n?|\n).*/g);return a?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),r},"input"),unput:s(function(r){var 
a=r.length,h=r.split(/(?:\r\n?|\n)/g);this._input=r+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-a),this.offset-=a;var u=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),h.length-1&&(this.yylineno-=h.length-1);var y=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:h?(h.length===u.length?this.yylloc.first_column:0)+u[u.length-h.length].length-h[0].length:this.yylloc.first_column-a},this.options.ranges&&(this.yylloc.range=[y[0],y[0]+this.yyleng-a]),this.yyleng=this.yytext.length,this},"unput"),more:s(function(){return this._more=!0,this},"more"),reject:s(function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},"reject"),less:s(function(r){this.unput(this.match.slice(r))},"less"),pastInput:s(function(){var r=this.matched.substr(0,this.matched.length-this.match.length);return(r.length>20?"...":"")+r.substr(-20).replace(/\n/g,"")},"pastInput"),upcomingInput:s(function(){var r=this.match;return r.length<20&&(r+=this._input.substr(0,20-r.length)),(r.substr(0,20)+(r.length>20?"...":"")).replace(/\n/g,"")},"upcomingInput"),showPosition:s(function(){var r=this.pastInput(),a=new Array(r.length+1).join("-");return r+this.upcomingInput()+`

View file

@ -8,18 +8,18 @@
<link rel="icon" type="image/png" href="favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Lightrag</title>
<script type="module" crossorigin src="/webui/assets/index-BqzR_4SY.js"></script>
<script type="module" crossorigin src="/webui/assets/index-Bgbh_BKw.js"></script>
<link rel="modulepreload" crossorigin href="/webui/assets/react-vendor-DEwriMA6.js">
<link rel="modulepreload" crossorigin href="/webui/assets/ui-vendor-CeCm8EER.js">
<link rel="modulepreload" crossorigin href="/webui/assets/graph-vendor-B-X5JegA.js">
<link rel="modulepreload" crossorigin href="/webui/assets/utils-vendor-BysuhMZA.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-bahMe5Gt.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-nDnos1H0.js">
<link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-BOzHoVUU.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-graph-B2JqR-0F.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-documents-5YuUz-b0.js">
<link rel="modulepreload" crossorigin href="/webui/assets/mermaid-vendor-Btdx_ET5.js">
<link rel="modulepreload" crossorigin href="/webui/assets/markdown-vendor-DmIvJdn7.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-DiuUfFgI.js">
<link rel="modulepreload" crossorigin href="/webui/assets/feature-retrieval-D3LH9QjI.js">
<link rel="stylesheet" crossorigin href="/webui/assets/feature-graph-BipNuM18.css">
<link rel="stylesheet" crossorigin href="/webui/assets/index-BvrNHAMA.css">
<link rel="stylesheet" crossorigin href="/webui/assets/index-CCBiRoRx.css">
</head>
<body>
<div id="root"></div>

View file

@ -671,6 +671,29 @@ class BaseGraphStorage(StorageNameSpace, ABC):
A list of all edges, where each edge is a dictionary of its properties
"""
@abstractmethod
async def get_popular_labels(self, limit: int = 300) -> list[str]:
"""Get popular labels by node degree (most connected entities)
Args:
limit: Maximum number of labels to return
Returns:
List of labels sorted by degree (highest first)
"""
@abstractmethod
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
"""Search labels with fuzzy matching
Args:
query: Search query string
limit: Maximum number of results to return
Returns:
List of matching labels sorted by relevance
"""
class DocStatus(str, Enum):
"""Document processing status"""

View file

@ -1089,3 +1089,100 @@ class MemgraphStorage(BaseGraphStorage):
edges.append(edge_properties)
await result.consume()
return edges
async def get_popular_labels(self, limit: int = 300) -> list[str]:
    """Get popular labels by node degree (most connected entities).

    Args:
        limit: Maximum number of labels to return

    Returns:
        List of labels sorted by degree (highest first, ties broken
        alphabetically); empty list on query failure.

    Raises:
        RuntimeError: If the Memgraph driver has not been initialized.
    """
    if self._driver is None:
        raise RuntimeError(
            "Memgraph driver is not initialized. Call 'await initialize()' first."
        )
    try:
        workspace_label = self._get_workspace_label()
        # The label cannot be parameterized in Cypher, so it is interpolated;
        # coerce `limit` to int so the interpolated LIMIT cannot inject Cypher.
        safe_limit = int(limit)
        async with self._driver.session(
            database=self._DATABASE, default_access_mode="READ"
        ) as session:
            # OPTIONAL MATCH keeps zero-degree nodes in the result with degree 0.
            query = f"""
                MATCH (n:`{workspace_label}`)
                WHERE n.entity_id IS NOT NULL
                OPTIONAL MATCH (n)-[r]-()
                WITH n.entity_id AS label, count(r) AS degree
                ORDER BY degree DESC, label ASC
                LIMIT {safe_limit}
                RETURN label
            """
            result = await session.run(query)
            labels = []
            async for record in result:
                labels.append(record["label"])
            await result.consume()
            logger.debug(
                f"[{self.workspace}] Retrieved {len(labels)} popular labels (limit: {limit})"
            )
            return labels
    except Exception as e:
        # Best-effort API: log and return an empty list rather than propagate.
        logger.error(f"[{self.workspace}] Error getting popular labels: {str(e)}")
        return []
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
    """Search labels with fuzzy (case-insensitive substring) matching.

    Args:
        query: Search query string
        limit: Maximum number of results to return

    Returns:
        List of matching labels sorted by relevance (exact > prefix >
        contains, shorter labels first within the contains tier); empty
        list for a blank query or on query failure.

    Raises:
        RuntimeError: If the Memgraph driver has not been initialized.
    """
    if self._driver is None:
        raise RuntimeError(
            "Memgraph driver is not initialized. Call 'await initialize()' first."
        )
    query_lower = query.lower().strip()
    if not query_lower:
        return []
    try:
        workspace_label = self._get_workspace_label()
        # Coerce `limit` to int so the interpolated LIMIT cannot inject Cypher;
        # the search string itself is passed as a bound parameter.
        safe_limit = int(limit)
        async with self._driver.session(
            database=self._DATABASE, default_access_mode="READ"
        ) as session:
            cypher_query = f"""
                MATCH (n:`{workspace_label}`)
                WHERE n.entity_id IS NOT NULL
                WITH n.entity_id AS label, toLower(n.entity_id) AS label_lower
                WHERE label_lower CONTAINS $query_lower
                WITH label, label_lower,
                     CASE
                         WHEN label_lower = $query_lower THEN 1000
                         WHEN label_lower STARTS WITH $query_lower THEN 500
                         ELSE 100 - size(label)
                     END AS score
                ORDER BY score DESC, label ASC
                LIMIT {safe_limit}
                RETURN label
            """
            result = await session.run(cypher_query, query_lower=query_lower)
            labels = []
            async for record in result:
                labels.append(record["label"])
            await result.consume()
            logger.debug(
                f"[{self.workspace}] Search query '{query}' returned {len(labels)} results (limit: {limit})"
            )
            return labels
    except Exception as e:
        # Best-effort API: log and return an empty list rather than propagate.
        logger.error(f"[{self.workspace}] Error searching labels: {str(e)}")
        return []

View file

@ -1,4 +1,5 @@
import os
import re
import time
from dataclasses import dataclass, field
import numpy as np
@ -112,6 +113,7 @@ class MongoKVStorage(BaseKVStorage):
# Keep original namespace unchanged for type detection logic
if effective_workspace:
self.final_namespace = f"{effective_workspace}_{self.namespace}"
self.workspace = effective_workspace
logger.debug(
f"Final namespace with workspace prefix: '{self.final_namespace}'"
)
@ -335,6 +337,7 @@ class MongoDocStatusStorage(DocStatusStorage):
# Keep original namespace unchanged for type detection logic
if effective_workspace:
self.final_namespace = f"{effective_workspace}_{self.namespace}"
self.workspace = effective_workspace
logger.debug(
f"Final namespace with workspace prefix: '{self.final_namespace}'"
)
@ -474,6 +477,7 @@ class MongoDocStatusStorage(DocStatusStorage):
async def create_and_migrate_indexes_if_not_exists(self):
"""Create indexes to optimize pagination queries and migrate file_path indexes for Chinese collation"""
try:
# Get indexes for the current collection only
indexes_cursor = await self._data.list_indexes()
existing_indexes = await indexes_cursor.to_list(length=None)
existing_index_names = {idx.get("name", "") for idx in existing_indexes}
@ -481,80 +485,67 @@ class MongoDocStatusStorage(DocStatusStorage):
# Define collation configuration for Chinese pinyin sorting
collation_config = {"locale": "zh", "numericOrdering": True}
# 1. Define all indexes needed (including original pagination indexes and new collation indexes)
# Use workspace-specific index names to avoid cross-workspace conflicts
workspace_prefix = f"{self.workspace}_" if self.workspace != "_" else ""
# 1. Define all indexes needed with workspace-specific names
all_indexes = [
# Original pagination indexes
{
"name": "status_updated_at",
"name": f"{workspace_prefix}status_updated_at",
"keys": [("status", 1), ("updated_at", -1)],
},
{
"name": "status_created_at",
"name": f"{workspace_prefix}status_created_at",
"keys": [("status", 1), ("created_at", -1)],
},
{"name": "updated_at", "keys": [("updated_at", -1)]},
{"name": "created_at", "keys": [("created_at", -1)]},
{"name": "id", "keys": [("_id", 1)]},
{"name": "track_id", "keys": [("track_id", 1)]},
# New file_path indexes with Chinese collation
{"name": f"{workspace_prefix}updated_at", "keys": [("updated_at", -1)]},
{"name": f"{workspace_prefix}created_at", "keys": [("created_at", -1)]},
{"name": f"{workspace_prefix}id", "keys": [("_id", 1)]},
{"name": f"{workspace_prefix}track_id", "keys": [("track_id", 1)]},
# New file_path indexes with Chinese collation and workspace-specific names
{
"name": "file_path_zh_collation",
"name": f"{workspace_prefix}file_path_zh_collation",
"keys": [("file_path", 1)],
"collation": collation_config,
},
{
"name": "status_file_path_zh_collation",
"name": f"{workspace_prefix}status_file_path_zh_collation",
"keys": [("status", 1), ("file_path", 1)],
"collation": collation_config,
},
]
# 2. Handle index migration: drop conflicting indexes with different names but same key patterns
for index_info in all_indexes:
target_keys = index_info["keys"]
target_name = index_info["name"]
target_collation = index_info.get("collation")
# 2. Handle legacy index cleanup: only drop old indexes that exist in THIS collection
legacy_index_names = [
"file_path_zh_collation",
"status_file_path_zh_collation",
"status_updated_at",
"status_created_at",
"updated_at",
"created_at",
"id",
"track_id",
]
# Find existing indexes with the same key pattern but different names or collation
conflicting_indexes = []
for idx in existing_indexes:
idx_name = idx.get("name", "")
idx_keys = list(idx.get("key", {}).items())
idx_collation = idx.get("collation")
# Skip the _id_ index (MongoDB default)
if idx_name == "_id_":
continue
# Check if keys match but name or collation differs
if idx_keys == target_keys:
if (
idx_name != target_name
or (target_collation and not idx_collation)
or (not target_collation and idx_collation)
or (
target_collation
and idx_collation
and target_collation != idx_collation
)
):
conflicting_indexes.append(idx_name)
# Drop conflicting indexes
for conflicting_name in conflicting_indexes:
for legacy_name in legacy_index_names:
if (
legacy_name in existing_index_names
and legacy_name
!= f"{workspace_prefix}{legacy_name.replace(workspace_prefix, '')}"
):
try:
await self._data.drop_index(conflicting_name)
logger.info(
f"[{self.workspace}] Migrated: dropped conflicting index '{conflicting_name}' for collection {self._collection_name}"
await self._data.drop_index(legacy_name)
logger.debug(
f"[{self.workspace}] Migrated: dropped legacy index '{legacy_name}' from collection {self._collection_name}"
)
# Remove from existing_index_names to allow recreation
existing_index_names.discard(conflicting_name)
existing_index_names.discard(legacy_name)
except PyMongoError as drop_error:
logger.warning(
f"[{self.workspace}] Failed to drop conflicting index '{conflicting_name}': {drop_error}"
f"[{self.workspace}] Failed to drop legacy index '{legacy_name}' from collection {self._collection_name}: {drop_error}"
)
# 3. Create all needed indexes
# 3. Create all needed indexes with workspace-specific names
for index_info in all_indexes:
index_name = index_info["name"]
if index_name not in existing_index_names:
@ -566,7 +557,7 @@ class MongoDocStatusStorage(DocStatusStorage):
await self._data.create_index(
index_info["keys"], **create_kwargs
)
logger.info(
logger.debug(
f"[{self.workspace}] Created index '{index_name}' for collection {self._collection_name}"
)
except PyMongoError as create_error:
@ -734,6 +725,7 @@ class MongoGraphStorage(BaseGraphStorage):
# Keep original namespace unchanged for type detection logic
if effective_workspace:
self.final_namespace = f"{effective_workspace}_{self.namespace}"
self.workspace = effective_workspace
logger.debug(
f"Final namespace with workspace prefix: '{self.final_namespace}'"
)
@ -757,6 +749,10 @@ class MongoGraphStorage(BaseGraphStorage):
self.edge_collection = await get_or_create_collection(
self.db, self._edge_collection_name
)
# Create Atlas Search index for better search performance if possible
await self.create_search_index_if_not_exists()
logger.debug(
f"[{self.workspace}] Use MongoDB as KG {self._collection_name}"
)
@ -1612,6 +1608,403 @@ class MongoGraphStorage(BaseGraphStorage):
edges.append(edge_dict)
return edges
async def get_popular_labels(self, limit: int = 300) -> list[str]:
    """Get popular labels by node degree (most connected entities)

    Degree is derived entirely from the edge collection (outbound +
    inbound edge counts per node id), so nodes with zero edges never
    appear in the result.

    Args:
        limit: Maximum number of labels to return

    Returns:
        List of labels sorted by degree (highest first); empty list on error
    """
    try:
        # Use aggregation pipeline to count edges per node and sort by degree
        pipeline = [
            # Count outbound edges
            {"$group": {"_id": "$source_node_id", "out_degree": {"$sum": 1}}},
            # Union with inbound edges count
            {
                "$unionWith": {
                    "coll": self._edge_collection_name,
                    "pipeline": [
                        {
                            "$group": {
                                "_id": "$target_node_id",
                                "in_degree": {"$sum": 1},
                            }
                        }
                    ],
                }
            },
            # Group by node_id and sum degrees
            {
                "$group": {
                    "_id": "$_id",
                    "total_degree": {
                        "$sum": {
                            "$add": [
                                {"$ifNull": ["$out_degree", 0]},
                                {"$ifNull": ["$in_degree", 0]},
                            ]
                        }
                    },
                }
            },
            # Sort by degree descending, then by label ascending
            {"$sort": {"total_degree": -1, "_id": 1}},
            # Limit results
            {"$limit": limit},
            # Project only the label
            {"$project": {"_id": 1}},
        ]

        # allowDiskUse lets the $group/$sort stages spill for large graphs.
        # NOTE(review): aggregate() is awaited here, which matches the async
        # PyMongo driver; Motor returns a cursor without awaiting — confirm
        # which driver this storage targets.
        cursor = await self.edge_collection.aggregate(pipeline, allowDiskUse=True)
        labels = []
        async for doc in cursor:
            if doc.get("_id"):
                labels.append(doc["_id"])

        logger.debug(
            f"[{self.workspace}] Retrieved {len(labels)} popular labels (limit: {limit})"
        )
        return labels
    except Exception as e:
        # Best-effort API: log and return an empty list rather than propagate.
        logger.error(f"[{self.workspace}] Error getting popular labels: {str(e)}")
        return []
async def _try_atlas_text_search(self, query_strip: str, limit: int) -> list[str]:
    """Try Atlas Search using simple text search.

    First (cheapest) strategy in the progressive search chain.

    Args:
        query_strip: Pre-stripped, non-empty search string.
        limit: Maximum number of labels to return.

    Returns:
        Matching node ids (the ``_id`` doubles as the entity label), or an
        empty list when the index is unavailable or nothing matched.
    """
    try:
        pipeline = [
            {
                "$search": {
                    "index": "entity_id_search_idx",
                    "text": {"query": query_strip, "path": "_id"},
                }
            },
            # searchScore is projected for debuggability; only _id is consumed.
            {"$project": {"_id": 1, "score": {"$meta": "searchScore"}}},
            {"$limit": limit},
        ]

        cursor = await self.collection.aggregate(pipeline)
        labels = [doc["_id"] async for doc in cursor if doc.get("_id")]

        if labels:
            logger.debug(
                f"[{self.workspace}] Atlas text search returned {len(labels)} results"
            )
            return labels
        return []

    except PyMongoError as e:
        # Expected on non-Atlas deployments or while the index is building;
        # the caller falls through to the next strategy.
        logger.debug(f"[{self.workspace}] Atlas text search failed: {e}")
        return []
async def _try_atlas_autocomplete_search(
    self, query_strip: str, limit: int
) -> list[str]:
    """Try Atlas Search using autocomplete for prefix matching.

    Second strategy in the progressive search chain; fuzzy options
    (maxEdits=1, prefixLength=1) let near-miss prefixes still match.

    Args:
        query_strip: Pre-stripped, non-empty search string.
        limit: Maximum number of labels to return.

    Returns:
        Matching node ids, or an empty list when the index is unavailable
        or nothing matched.
    """
    try:
        pipeline = [
            {
                "$search": {
                    "index": "entity_id_search_idx",
                    "autocomplete": {
                        "query": query_strip,
                        "path": "_id",
                        "fuzzy": {"maxEdits": 1, "prefixLength": 1},
                    },
                }
            },
            {"$project": {"_id": 1, "score": {"$meta": "searchScore"}}},
            {"$limit": limit},
        ]

        cursor = await self.collection.aggregate(pipeline)
        labels = [doc["_id"] async for doc in cursor if doc.get("_id")]

        if labels:
            logger.debug(
                f"[{self.workspace}] Atlas autocomplete search returned {len(labels)} results"
            )
            return labels
        return []

    except PyMongoError as e:
        # Expected on non-Atlas deployments; caller tries the next strategy.
        logger.debug(f"[{self.workspace}] Atlas autocomplete search failed: {e}")
        return []
async def _try_atlas_compound_search(
    self, query_strip: str, limit: int
) -> list[str]:
    """Try Atlas Search using compound query for comprehensive matching.

    Last Atlas strategy in the chain. Combines three ``should`` clauses
    with descending boosts so the strongest match kind ranks first:
    exact text (x10) > fuzzy autocomplete (x5) > substring wildcard (x2).
    At least one clause must match (minimumShouldMatch=1).

    Args:
        query_strip: Pre-stripped, non-empty search string.
        limit: Maximum number of labels to return.

    Returns:
        Matching node ids sorted by search score, or an empty list when
        the index is unavailable or nothing matched.
    """
    try:
        pipeline = [
            {
                "$search": {
                    "index": "entity_id_search_idx",
                    "compound": {
                        "should": [
                            {
                                "text": {
                                    "query": query_strip,
                                    "path": "_id",
                                    "score": {"boost": {"value": 10}},
                                }
                            },
                            {
                                "autocomplete": {
                                    "query": query_strip,
                                    "path": "_id",
                                    "score": {"boost": {"value": 5}},
                                    "fuzzy": {"maxEdits": 1, "prefixLength": 1},
                                }
                            },
                            {
                                "wildcard": {
                                    "query": f"*{query_strip}*",
                                    "path": "_id",
                                    "score": {"boost": {"value": 2}},
                                }
                            },
                        ],
                        "minimumShouldMatch": 1,
                    },
                }
            },
            {"$project": {"_id": 1, "score": {"$meta": "searchScore"}}},
            {"$sort": {"score": {"$meta": "searchScore"}}},
            {"$limit": limit},
        ]

        cursor = await self.collection.aggregate(pipeline)
        labels = [doc["_id"] async for doc in cursor if doc.get("_id")]

        if labels:
            logger.debug(
                f"[{self.workspace}] Atlas compound search returned {len(labels)} results"
            )
            return labels
        return []

    except PyMongoError as e:
        # Expected on non-Atlas deployments; caller moves to regex fallback.
        logger.debug(f"[{self.workspace}] Atlas compound search failed: {e}")
        return []
async def _fallback_regex_search(self, query_strip: str, limit: int) -> list[str]:
    """Fallback to regex-based search when Atlas Search fails.

    Case-insensitive substring match against the node ``_id``, then
    ranked in Python: exact match > prefix match > other substring hits
    (alphabetical within each tier).

    Args:
        query_strip: Pre-stripped, non-empty search string.
        limit: Maximum number of labels to return after ranking.

    Returns:
        Ranked matching node ids, or an empty list on failure.
    """
    try:
        logger.debug(
            f"[{self.workspace}] Using regex fallback search for: '{query_strip}'"
        )

        # Escape so user input is matched literally, never as a pattern.
        escaped_query = re.escape(query_strip)
        regex_condition = {"_id": {"$regex": escaped_query, "$options": "i"}}

        # Over-fetch (limit * 2) so the ranking below has candidates to
        # promote. NOTE(review): an exact match outside this window can
        # still be missed — confirm this trade-off is acceptable.
        cursor = self.collection.find(regex_condition, {"_id": 1}).limit(limit * 2)
        docs = await cursor.to_list(length=limit * 2)

        # Extract labels
        labels = []
        for doc in docs:
            doc_id = doc.get("_id")
            if doc_id:
                labels.append(doc_id)

        # Sort results to prioritize exact matches and starts-with matches
        def sort_key(label):
            label_lower = label.lower()
            query_lower_strip = query_strip.lower()
            if label_lower == query_lower_strip:
                return (0, label_lower)  # Exact match - highest priority
            elif label_lower.startswith(query_lower_strip):
                return (1, label_lower)  # Starts with - medium priority
            else:
                return (2, label_lower)  # Contains - lowest priority

        labels.sort(key=sort_key)
        labels = labels[:limit]  # Apply final limit after sorting

        logger.debug(
            f"[{self.workspace}] Regex fallback search returned {len(labels)} results (limit: {limit})"
        )
        return labels

    except Exception as e:
        logger.error(f"[{self.workspace}] Regex fallback search failed: {e}")
        import traceback

        logger.error(f"[{self.workspace}] Traceback: {traceback.format_exc()}")
        return []
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
    """
    Search labels with progressive fallback strategy:
    1. Atlas text search (simple and fast)
    2. Atlas autocomplete search (prefix matching with fuzzy)
    3. Atlas compound search (comprehensive matching)
    4. Regex fallback (when Atlas Search is unavailable)

    The first strategy that yields any results wins; later strategies
    are only tried when the earlier ones return nothing or raise.

    Args:
        query: Search query string (leading/trailing whitespace ignored)
        limit: Maximum number of results to return

    Returns:
        Matching labels, or an empty list for a blank query, an empty
        collection, or when every strategy fails.
    """
    query_strip = query.strip()
    if not query_strip:
        return []

    # First check if we have any nodes at all
    try:
        node_count = await self.collection.count_documents({})
        if node_count == 0:
            logger.debug(
                f"[{self.workspace}] No nodes found in collection {self._collection_name}"
            )
            return []
    except PyMongoError as e:
        logger.error(f"[{self.workspace}] Error counting nodes: {e}")
        return []

    # Progressive search strategy
    search_methods = [
        ("text", self._try_atlas_text_search),
        ("autocomplete", self._try_atlas_autocomplete_search),
        ("compound", self._try_atlas_compound_search),
    ]

    # Try Atlas Search methods in order
    for method_name, search_method in search_methods:
        try:
            labels = await search_method(query_strip, limit)
            if labels:
                logger.debug(
                    f"[{self.workspace}] Search successful using {method_name} method: {len(labels)} results"
                )
                return labels
            else:
                logger.debug(
                    f"[{self.workspace}] {method_name} search returned no results, trying next method"
                )
        except Exception as e:
            # Each strategy is best-effort; a failure just advances the chain.
            logger.debug(
                f"[{self.workspace}] {method_name} search failed: {e}, trying next method"
            )
            continue

    # If all Atlas Search methods fail, use regex fallback
    logger.info(
        f"[{self.workspace}] All Atlas Search methods failed, using regex fallback search for: '{query_strip}'"
    )
    return await self._fallback_regex_search(query_strip, limit)
async def _check_if_index_needs_rebuild(
self, indexes: list, index_name: str
) -> bool:
"""Check if the existing index needs to be rebuilt due to configuration issues."""
for index in indexes:
if index["name"] == index_name:
# Check if the index has the old problematic configuration
definition = index.get("latestDefinition", {})
mappings = definition.get("mappings", {})
fields = mappings.get("fields", {})
id_field = fields.get("_id", {})
# If it's the old single-type autocomplete configuration, rebuild
if (
isinstance(id_field, dict)
and id_field.get("type") == "autocomplete"
):
logger.info(
f"[{self.workspace}] Found old index configuration for '{index_name}', will rebuild"
)
return True
# If it's not a list (multi-type configuration), rebuild
if not isinstance(id_field, list):
logger.info(
f"[{self.workspace}] Index '{index_name}' needs upgrade to multi-type configuration"
)
return True
logger.info(
f"[{self.workspace}] Index '{index_name}' has correct configuration"
)
return False
return True # Index doesn't exist, needs creation
async def _safely_drop_old_index(self, index_name: str):
    """Safely drop the old search index.

    Failures are logged at warning level and swallowed so index
    recreation (or the regex fallback) can still proceed.

    Args:
        index_name: Name of the Atlas Search index to drop.
    """
    try:
        await self.collection.drop_search_index(index_name)
        logger.info(
            f"[{self.workspace}] Successfully dropped old search index '{index_name}'"
        )
    except PyMongoError as e:
        logger.warning(
            f"[{self.workspace}] Could not drop old index '{index_name}': {e}"
        )
async def _create_improved_search_index(self, index_name: str):
    """Create an improved search index with multiple field types.

    The ``_id`` field is mapped three ways so each search strategy has a
    suitable index type:
    - ``string``: scored full-text matching (``text`` operator)
    - ``token``: exact/keyword matching (used by ``wildcard``)
    - ``autocomplete``: 2-15 character grams for prefix/fuzzy matching

    Atlas builds the index asynchronously; callers should expect the
    regex fallback to serve queries until the build completes.

    Args:
        index_name: Name to assign to the new Atlas Search index.
    """
    search_index_model = SearchIndexModel(
        definition={
            "mappings": {
                "dynamic": False,
                "fields": {
                    "_id": [
                        {
                            "type": "string",
                        },
                        {
                            "type": "token",
                        },
                        {
                            "type": "autocomplete",
                            "maxGrams": 15,
                            "minGrams": 2,
                        },
                    ]
                },
            },
            "analyzer": "lucene.standard",  # Index-level analyzer for text processing
        },
        name=index_name,
        type="search",
    )

    await self.collection.create_search_index(search_index_model)
    logger.info(
        f"[{self.workspace}] Created improved Atlas Search index '{index_name}' for collection {self._collection_name}. "
    )
    logger.info(
        f"[{self.workspace}] Index will be built asynchronously, using regex fallback until ready."
    )
async def create_search_index_if_not_exists(self):
    """Creates an improved Atlas Search index for entity search, rebuilding if necessary.

    No-op (logged at info level) on deployments without Atlas Search
    support; label search then relies on the regex fallback.
    """
    index_name = "entity_id_search_idx"
    try:
        # Check if we're using MongoDB Atlas (has search index capabilities)
        indexes_cursor = await self.collection.list_search_indexes()
        indexes = await indexes_cursor.to_list(length=None)

        # Check if we need to rebuild the index
        needs_rebuild = await self._check_if_index_needs_rebuild(
            indexes, index_name
        )

        if needs_rebuild:
            # Check if index exists and drop it
            index_exists = any(idx["name"] == index_name for idx in indexes)
            if index_exists:
                await self._safely_drop_old_index(index_name)

            # Create the improved search index (async, no waiting)
            await self._create_improved_search_index(index_name)
        else:
            logger.info(
                f"[{self.workspace}] Atlas Search index '{index_name}' already exists with correct configuration"
            )

    except PyMongoError as e:
        # This is expected if not using MongoDB Atlas or if search indexes are not supported
        logger.info(
            f"[{self.workspace}] Could not create Atlas Search index for {self._collection_name}: {e}. "
            "This is normal if not using MongoDB Atlas - search will use regex fallback."
        )
    except Exception as e:
        logger.warning(
            f"[{self.workspace}] Unexpected error creating Atlas Search index for {self._collection_name}: {e}"
        )
async def drop(self) -> dict[str, str]:
"""Drop the storage by removing all documents in the collection.
@ -1685,6 +2078,7 @@ class MongoVectorDBStorage(BaseVectorStorage):
# Keep original namespace unchanged for type detection logic
if effective_workspace:
self.final_namespace = f"{effective_workspace}_{self.namespace}"
self.workspace = effective_workspace
logger.debug(
f"Final namespace with workspace prefix: '{self.final_namespace}'"
)

View file

@ -70,6 +70,11 @@ class Neo4JStorage(BaseGraphStorage):
"""Return workspace label (guaranteed non-empty during initialization)"""
return self.workspace
def _is_chinese_text(self, text: str) -> bool:
"""Check if text contains Chinese characters."""
chinese_pattern = re.compile(r"[\u4e00-\u9fff]+")
return bool(chinese_pattern.search(text))
async def initialize(self):
async with get_data_init_lock():
URI = os.environ.get("NEO4J_URI", config.get("neo4j", "uri", fallback=None))
@ -201,44 +206,132 @@ class Neo4JStorage(BaseGraphStorage):
raise e
if connected:
# Create index for workspace nodes on entity_id if it doesn't exist
workspace_label = self._get_workspace_label()
# Create B-Tree index for entity_id for faster lookups
try:
async with self._driver.session(database=database) as session:
# Check if index exists first
check_query = f"""
CALL db.indexes() YIELD name, labelsOrTypes, properties
WHERE labelsOrTypes = ['{workspace_label}'] AND properties = ['entity_id']
RETURN count(*) > 0 AS exists
"""
try:
check_result = await session.run(check_query)
record = await check_result.single()
await check_result.consume()
index_exists = record and record.get("exists", False)
if not index_exists:
# Create index only if it doesn't exist
result = await session.run(
f"CREATE INDEX FOR (n:`{workspace_label}`) ON (n.entity_id)"
)
await result.consume()
logger.info(
f"[{self.workspace}] Created index for {workspace_label} nodes on entity_id in {database}"
)
except Exception:
# Fallback if db.indexes() is not supported in this Neo4j version
result = await session.run(
f"CREATE INDEX IF NOT EXISTS FOR (n:`{workspace_label}`) ON (n.entity_id)"
)
await result.consume()
await session.run(
f"CREATE INDEX IF NOT EXISTS FOR (n:`{workspace_label}`) ON (n.entity_id)"
)
logger.info(
f"[{self.workspace}] Ensured B-Tree index on entity_id for {workspace_label} in {database}"
)
except Exception as e:
logger.warning(
f"[{self.workspace}] Failed to create index: {str(e)}"
f"[{self.workspace}] Failed to create B-Tree index: {str(e)}"
)
# Create full-text index for entity_id for faster text searches
await self._create_fulltext_index(
self._driver, self._DATABASE, workspace_label
)
break
async def _create_fulltext_index(
    self, driver: AsyncDriver, database: str, workspace_label: str
):
    """Create a full-text index on the entity_id property with Chinese tokenizer support.

    Args:
        driver: Async Neo4j driver used to open the session.
        database: Target database name.
        workspace_label: Node label scoping this workspace's entities.

    Behaviour:
        - If ``entity_id_fulltext_idx`` already exists with the ``cjk``
          analyzer, nothing is done.
        - If it exists with another analyzer, it is dropped and recreated
          with the CJK analyzer (falling back to the standard analyzer when
          CJK is not supported by the server).
        - "Unknown command" / syntax errors are logged and ignored so search
          can fall back to non-indexed queries on older Neo4j versions.
    """
    index_name = "entity_id_fulltext_idx"
    try:
        async with driver.session(database=database) as session:
            # Check if the full-text index exists and get its configuration
            check_index_query = "SHOW FULLTEXT INDEXES"
            result = await session.run(check_index_query)
            indexes = await result.data()
            await result.consume()

            # Find our index among all full-text indexes, if present.
            existing_index = None
            for idx in indexes:
                if idx["name"] == index_name:
                    existing_index = idx
                    break

            # Check if we need to recreate the index
            needs_recreation = False
            if existing_index:
                # Check if the existing index has CJK analyzer.
                # NOTE(review): assumes SHOW FULLTEXT INDEXES exposes the
                # analyzer under options.indexConfig['fulltext.analyzer'] -
                # confirm against the deployed Neo4j version.
                index_config = existing_index.get("options", {})
                current_analyzer = index_config.get("indexConfig", {}).get(
                    "fulltext.analyzer", "standard"
                )

                if current_analyzer != "cjk":
                    logger.info(
                        f"[{self.workspace}] Existing index '{index_name}' uses '{current_analyzer}' analyzer. "
                        "Recreating with CJK analyzer for Chinese support."
                    )
                    needs_recreation = True
                else:
                    logger.debug(
                        f"[{self.workspace}] Full-text index '{index_name}' already exists with CJK analyzer."
                    )
                    return

            if not existing_index or needs_recreation:
                # Drop existing index if it needs recreation
                if needs_recreation:
                    try:
                        drop_query = f"DROP INDEX {index_name}"
                        result = await session.run(drop_query)
                        await result.consume()
                        logger.info(
                            f"[{self.workspace}] Dropped existing index '{index_name}'"
                        )
                    except Exception as drop_error:
                        # Best effort: creation below may still fail if the
                        # drop did not succeed.
                        logger.warning(
                            f"[{self.workspace}] Failed to drop existing index: {str(drop_error)}"
                        )

                # Create new index with CJK analyzer
                logger.info(
                    f"[{self.workspace}] Creating full-text index '{index_name}' with Chinese tokenizer support."
                )
                try:
                    create_index_query = f"""
                        CREATE FULLTEXT INDEX {index_name}
                        FOR (n:`{workspace_label}`) ON EACH [n.entity_id]
                        OPTIONS {{
                            indexConfig: {{
                                `fulltext.analyzer`: 'cjk',
                                `fulltext.eventually_consistent`: true
                            }}
                        }}
                    """
                    result = await session.run(create_index_query)
                    await result.consume()
                    logger.info(
                        f"[{self.workspace}] Successfully created full-text index '{index_name}' with CJK analyzer."
                    )
                except Exception as cjk_error:
                    # Fallback to standard analyzer if CJK is not supported
                    logger.warning(
                        f"[{self.workspace}] CJK analyzer not supported: {str(cjk_error)}. "
                        "Falling back to standard analyzer."
                    )
                    create_index_query = f"""
                        CREATE FULLTEXT INDEX {index_name}
                        FOR (n:`{workspace_label}`) ON EACH [n.entity_id]
                    """
                    result = await session.run(create_index_query)
                    await result.consume()
                    logger.info(
                        f"[{self.workspace}] Successfully created full-text index '{index_name}' with standard analyzer."
                    )
    except Exception as e:
        # Handle cases where the command might not be supported
        if "Unknown command" in str(e) or "invalid syntax" in str(e).lower():
            logger.warning(
                f"[{self.workspace}] Could not create or verify full-text index '{index_name}'. "
                "This might be because you are using a Neo4j version that does not support it. "
                "Search functionality will fall back to slower, non-indexed queries."
            )
        else:
            logger.error(
                f"[{self.workspace}] Failed to create or verify full-text index '{index_name}': {str(e)}"
            )
async def finalize(self):
"""Close the Neo4j driver and release all resources"""
async with get_graph_db_lock():
@ -251,7 +344,7 @@ class Neo4JStorage(BaseGraphStorage):
await self.finalize()
async def index_done_callback(self) -> None:
    # No-op: Neo4j persists writes itself, so there is nothing to flush here.
    pass
async def has_node(self, node_id: str) -> bool:
@ -1523,6 +1616,180 @@ class Neo4JStorage(BaseGraphStorage):
await result.consume()
return edges
async def get_popular_labels(self, limit: int = 300) -> list[str]:
    """Get popular labels by node degree (most connected entities)

    Args:
        limit: Maximum number of labels to return

    Returns:
        List of labels sorted by degree (highest first); ties are broken
        alphabetically by label.

    Raises:
        Exception: re-raises any driver/query error after logging it.
    """
    workspace_label = self._get_workspace_label()
    async with self._driver.session(
        database=self._DATABASE, default_access_mode="READ"
    ) as session:
        try:
            # OPTIONAL MATCH keeps isolated nodes (degree 0) in the result.
            query = f"""
                MATCH (n:`{workspace_label}`)
                WHERE n.entity_id IS NOT NULL
                OPTIONAL MATCH (n)-[r]-()
                WITH n.entity_id AS label, count(r) AS degree
                ORDER BY degree DESC, label ASC
                LIMIT $limit
                RETURN label
            """
            result = await session.run(query, limit=limit)
            labels = []
            async for record in result:
                labels.append(record["label"])
            await result.consume()
            logger.debug(
                f"[{self.workspace}] Retrieved {len(labels)} popular labels (limit: {limit})"
            )
            return labels
        except Exception as e:
            # BUGFIX: the previous code called ``await result.consume()``
            # here, but ``result`` is unbound when ``session.run`` itself
            # fails, raising UnboundLocalError and masking the original
            # error. Closing the session (via the async-with block) already
            # releases the underlying resources.
            logger.error(
                f"[{self.workspace}] Error getting popular labels: {str(e)}"
            )
            raise
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
    """
    Search labels with fuzzy matching, using a full-text index for performance if available.
    Enhanced with Chinese text support using CJK analyzer.
    Falls back to a slower CONTAINS search if the index is not available or fails.

    Args:
        query: Search string; leading/trailing whitespace is stripped and a
            blank query returns an empty list without touching the database.
        limit: Maximum number of labels to return.

    Returns:
        Matching ``entity_id`` labels ordered by relevance (exact match
        boosted most, then prefix/contains, then raw index score), with
        ties broken alphabetically.
    """
    workspace_label = self._get_workspace_label()
    query_strip = query.strip()
    if not query_strip:
        return []

    query_lower = query_strip.lower()
    is_chinese = self._is_chinese_text(query_strip)
    index_name = "entity_id_fulltext_idx"

    # Attempt to use the full-text index first
    try:
        async with self._driver.session(
            database=self._DATABASE, default_access_mode="READ"
        ) as session:
            if is_chinese:
                # For Chinese text, use different search strategies
                cypher_query = f"""
                    CALL db.index.fulltext.queryNodes($index_name, $search_query) YIELD node, score
                    WITH node, score
                    WHERE node:`{workspace_label}`
                    WITH node.entity_id AS label, score
                    WITH label, score,
                         CASE
                             WHEN label = $query_strip THEN score + 1000
                             WHEN label CONTAINS $query_strip THEN score + 500
                             ELSE score
                         END AS final_score
                    RETURN label
                    ORDER BY final_score DESC, label ASC
                    LIMIT $limit
                """
                # For Chinese, don't add wildcard as it may not work properly with CJK analyzer
                search_query = query_strip
            else:
                # For non-Chinese text, use the original logic with wildcard
                cypher_query = f"""
                    CALL db.index.fulltext.queryNodes($index_name, $search_query) YIELD node, score
                    WITH node, score
                    WHERE node:`{workspace_label}`
                    WITH node.entity_id AS label, toLower(node.entity_id) AS label_lower, score
                    WITH label, label_lower, score,
                         CASE
                             WHEN label_lower = $query_lower THEN score + 1000
                             WHEN label_lower STARTS WITH $query_lower THEN score + 500
                             WHEN label_lower CONTAINS ' ' + $query_lower OR label_lower CONTAINS '_' + $query_lower THEN score + 50
                             ELSE score
                         END AS final_score
                    RETURN label
                    ORDER BY final_score DESC, label ASC
                    LIMIT $limit
                """
                # NOTE(review): the raw query is passed to Lucene with a '*'
                # suffix; Lucene special characters (e.g. '+', '(') are not
                # escaped and may raise - such errors land in the CONTAINS
                # fallback below.
                search_query = f"{query_strip}*"

            result = await session.run(
                cypher_query,
                index_name=index_name,
                search_query=search_query,
                query_lower=query_lower,
                query_strip=query_strip,
                limit=limit,
            )
            labels = [record["label"] async for record in result]
            await result.consume()

            logger.debug(
                f"[{self.workspace}] Full-text search ({'Chinese' if is_chinese else 'Latin'}) for '{query}' returned {len(labels)} results (limit: {limit})"
            )
            return labels
    except Exception as e:
        # If the full-text search fails, fall back to CONTAINS search
        logger.warning(
            f"[{self.workspace}] Full-text search failed with error: {str(e)}. "
            "Falling back to slower, non-indexed search."
        )

    # Enhanced fallback implementation
    async with self._driver.session(
        database=self._DATABASE, default_access_mode="READ"
    ) as session:
        if is_chinese:
            # For Chinese text, use direct CONTAINS without case conversion
            cypher_query = f"""
                MATCH (n:`{workspace_label}`)
                WHERE n.entity_id IS NOT NULL
                WITH n.entity_id AS label
                WHERE label CONTAINS $query_strip
                WITH label,
                     CASE
                         WHEN label = $query_strip THEN 1000
                         WHEN label STARTS WITH $query_strip THEN 500
                         ELSE 100 - size(label)
                     END AS score
                ORDER BY score DESC, label ASC
                LIMIT $limit
                RETURN label
            """
            result = await session.run(
                cypher_query, query_strip=query_strip, limit=limit
            )
        else:
            # For non-Chinese text, use the original fallback logic
            cypher_query = f"""
                MATCH (n:`{workspace_label}`)
                WHERE n.entity_id IS NOT NULL
                WITH n.entity_id AS label, toLower(n.entity_id) AS label_lower
                WHERE label_lower CONTAINS $query_lower
                WITH label, label_lower,
                     CASE
                         WHEN label_lower = $query_lower THEN 1000
                         WHEN label_lower STARTS WITH $query_lower THEN 500
                         ELSE 100 - size(label)
                     END AS score
                ORDER BY score DESC, label ASC
                LIMIT $limit
                RETURN label
            """
            result = await session.run(
                cypher_query, query_lower=query_lower, limit=limit
            )

        labels = [record["label"] async for record in result]
        await result.consume()

        logger.debug(
            f"[{self.workspace}] Fallback search ({'Chinese' if is_chinese else 'Latin'}) for '{query}' returned {len(labels)} results (limit: {limit})"
        )
        return labels
async def drop(self) -> dict[str, str]:
"""Drop all data from current workspace storage and clean up resources

View file

@ -212,6 +212,87 @@ class NetworkXStorage(BaseGraphStorage):
# Return sorted list
return sorted(list(labels))
async def get_popular_labels(self, limit: int = 300) -> list[str]:
    """
    Get popular labels by node degree (most connected entities)

    Args:
        limit: Maximum number of labels to return

    Returns:
        List of labels sorted by degree (highest first). Ties are broken
        alphabetically so the ordering is deterministic and consistent with
        the Neo4j/PostgreSQL backends (ORDER BY degree DESC, label ASC).
    """
    graph = await self._get_graph()

    # graph.degree() yields (node, degree) pairs; sort by degree descending,
    # then label ascending. The previous implementation sorted on degree
    # only, which made the order of equal-degree nodes nondeterministic.
    ranked = sorted(graph.degree(), key=lambda item: (-item[1], str(item[0])))

    # Return top labels limited by the specified limit
    popular_labels = [str(node) for node, _ in ranked[:limit]]

    logger.debug(
        f"[{self.workspace}] Retrieved {len(popular_labels)} popular labels (limit: {limit})"
    )
    return popular_labels
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
    """
    Search labels with fuzzy matching

    Args:
        query: Search query string
        limit: Maximum number of results to return

    Returns:
        List of matching labels sorted by relevance
    """
    graph = await self._get_graph()
    needle = query.lower().strip()

    if not needle:
        return []

    def relevance(label: str, lowered: str) -> int:
        # Exact match ranks highest, then prefix, then plain substring
        # (shorter labels score higher among substring matches).
        if lowered == needle:
            score = 1000
        elif lowered.startswith(needle):
            score = 500
        else:
            score = 100 - len(label)
        # Bonus when the query appears at a word boundary (space/underscore).
        if f" {needle}" in lowered or f"_{needle}" in lowered:
            score += 50
        return score

    # Collect all substring matches together with their relevance scores.
    scored: list[tuple[str, int]] = []
    for node in graph.nodes():
        label = str(node)
        lowered = label.lower()
        if needle in lowered:
            scored.append((label, relevance(label, lowered)))

    # Order by score (descending), breaking ties alphabetically.
    scored.sort(key=lambda pair: (-pair[1], pair[0]))

    # Truncate to the requested number of results.
    search_results = [label for label, _ in scored[:limit]]

    logger.debug(
        f"[{self.workspace}] Search query '{query}' returned {len(search_results)} results (limit: {limit})"
    )
    return search_results
async def get_knowledge_graph(
self,
node_label: str,
@ -272,6 +353,9 @@ class NetworkXStorage(BaseGraphStorage):
# Store (node, depth, degree) in the queue
queue = [(node_label, 0, graph.degree(node_label))]
# Flag to track if there are unexplored neighbors due to depth limit
has_unexplored_neighbors = False
# Modified breadth-first search with degree-based prioritization
while queue and len(bfs_nodes) < max_nodes:
# Get the current depth from the first node in queue
@ -303,18 +387,30 @@ class NetworkXStorage(BaseGraphStorage):
for neighbor in unvisited_neighbors:
neighbor_degree = graph.degree(neighbor)
queue.append((neighbor, depth + 1, neighbor_degree))
else:
# Check if there are unexplored neighbors (skipped due to depth limit)
neighbors = list(graph.neighbors(current_node))
unvisited_neighbors = [
n for n in neighbors if n not in visited
]
if unvisited_neighbors:
has_unexplored_neighbors = True
# Check if we've reached max_nodes
if len(bfs_nodes) >= max_nodes:
break
# Check if graph is truncated - if we still have nodes in the queue
# and we've reached max_nodes, then the graph is truncated
if queue and len(bfs_nodes) >= max_nodes:
# Check if graph is truncated - either due to max_nodes limit or depth limit
if (queue and len(bfs_nodes) >= max_nodes) or has_unexplored_neighbors:
result.is_truncated = True
logger.info(
f"[{self.workspace}] Graph truncated: breadth-first search limited to {max_nodes} nodes"
)
if len(bfs_nodes) >= max_nodes:
logger.info(
f"[{self.workspace}] Graph truncated: max_nodes limit {max_nodes} reached"
)
else:
logger.info(
f"[{self.workspace}] Graph truncated: only {len(bfs_nodes)} nodes found within max_depth {max_depth}"
)
# Create subgraph with BFS discovered nodes
subgraph = graph.subgraph(bfs_nodes)

View file

@ -4259,6 +4259,113 @@ class PGGraphStorage(BaseGraphStorage):
edges.append(edge_properties)
return edges
async def get_popular_labels(self, limit: int = 300) -> list[str]:
    """Get popular labels by node degree (most connected entities) using native SQL for performance.

    Args:
        limit: Maximum number of labels to return.

    Returns:
        Labels ordered by degree descending, ties broken alphabetically.
        Returns an empty list (after logging) on any query error.

    NOTE(review): nodes with no edges never appear in node_degrees and are
    therefore excluded by the JOIN, unlike the Neo4j backend whose OPTIONAL
    MATCH includes degree-0 nodes - confirm this difference is intended.
    """
    try:
        # Native SQL query to calculate node degrees directly from AGE's underlying tables
        # This is significantly faster than using the cypher() function wrapper
        query = f"""
            WITH node_degrees AS (
                SELECT
                    node_id,
                    COUNT(*) AS degree
                FROM (
                    SELECT start_id AS node_id FROM {self.graph_name}._ag_label_edge
                    UNION ALL
                    SELECT end_id AS node_id FROM {self.graph_name}._ag_label_edge
                ) AS all_edges
                GROUP BY node_id
            )
            SELECT
                (ag_catalog.agtype_access_operator(VARIADIC ARRAY[v.properties, '"entity_id"'::agtype]))::text AS label
            FROM
                node_degrees d
            JOIN
                {self.graph_name}._ag_label_vertex v ON d.node_id = v.id
            WHERE
                ag_catalog.agtype_access_operator(VARIADIC ARRAY[v.properties, '"entity_id"'::agtype]) IS NOT NULL
            ORDER BY
                d.degree DESC,
                label ASC
            LIMIT $1;
        """

        # NOTE(review): assumes self._query maps the dict values onto $1
        # positionally - verify against _query's parameter handling.
        results = await self._query(query, params={"limit": limit})

        labels = [
            result["label"] for result in results if result and "label" in result
        ]

        logger.debug(
            f"[{self.workspace}] Retrieved {len(labels)} popular labels (limit: {limit})"
        )
        return labels
    except Exception as e:
        logger.error(f"[{self.workspace}] Error getting popular labels: {str(e)}")
        return []
async def search_labels(self, query: str, limit: int = 50) -> list[str]:
    """Search labels with fuzzy matching using native, parameterized SQL for performance and security.

    Args:
        query: Search string; matching is a case-insensitive substring test.
        limit: Maximum number of labels to return.

    Returns:
        Labels ordered by score (exact=1000, prefix=500, otherwise
        100-length, plus a +50 word-boundary bonus), ties alphabetical.
        Returns an empty list for blank queries or (after logging) on error.
    """
    query_lower = query.lower().strip()
    if not query_lower:
        return []

    # BUGFIX: '%' and '_' are LIKE/ILIKE wildcards. Previously the raw query
    # was interpolated into the patterns, so a user-supplied wildcard changed
    # the match semantics, and the "underscore word boundary" pattern
    # '%_q%' matched ANY character before the query (since '_' matches one
    # arbitrary character) instead of a literal underscore. PostgreSQL's
    # default LIKE escape character is backslash, so escape metacharacters.
    escaped = (
        query_lower.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
    )

    try:
        # Re-implementing with the correct agtype access operator and full scoring logic.
        sql_query = f"""
            WITH ranked_labels AS (
                SELECT
                    (ag_catalog.agtype_access_operator(VARIADIC ARRAY[properties, '"entity_id"'::agtype]))::text AS label,
                    LOWER((ag_catalog.agtype_access_operator(VARIADIC ARRAY[properties, '"entity_id"'::agtype]))::text) AS label_lower
                FROM
                    {self.graph_name}._ag_label_vertex
                WHERE
                    ag_catalog.agtype_access_operator(VARIADIC ARRAY[properties, '"entity_id"'::agtype]) IS NOT NULL
                    AND LOWER((ag_catalog.agtype_access_operator(VARIADIC ARRAY[properties, '"entity_id"'::agtype]))::text) ILIKE $1
            )
            SELECT
                label
            FROM (
                SELECT
                    label,
                    CASE
                        WHEN label_lower = $2 THEN 1000
                        WHEN label_lower LIKE $3 THEN 500
                        ELSE (100 - LENGTH(label))
                    END +
                    CASE
                        WHEN label_lower LIKE $4 OR label_lower LIKE $5 THEN 50
                        ELSE 0
                    END AS score
                FROM
                    ranked_labels
            ) AS scored_labels
            ORDER BY
                score DESC,
                label ASC
            LIMIT $6;
        """

        params = (
            f"%{escaped}%",  # For the main ILIKE clause ($1)
            query_lower,  # For exact match ($2) - equality, no wildcards
            f"{escaped}%",  # For prefix match ($3)
            f"% {escaped}%",  # For word boundary (space) ($4)
            f"%\\_{escaped}%",  # For word boundary (literal underscore) ($5)
            limit,  # For LIMIT ($6)
        )

        results = await self._query(sql_query, params=dict(enumerate(params, 1)))

        labels = [
            result["label"] for result in results if result and "label" in result
        ]

        logger.debug(
            f"[{self.workspace}] Search query '{query}' returned {len(labels)} results (limit: {limit})"
        )
        return labels
    except Exception as e:
        logger.error(
            f"[{self.workspace}] Error searching labels with query '{query}': {str(e)}"
        )
        return []
async def drop(self) -> dict[str, str]:
"""Drop the storage"""
async with get_graph_db_lock():

View file

@ -1,5 +1,5 @@
import axios, { AxiosError } from 'axios'
import { backendBaseUrl } from '@/lib/constants'
import { backendBaseUrl, popularLabelsDefaultLimit, searchLabelsDefaultLimit } from '@/lib/constants'
import { errorMessage } from '@/lib/utils'
import { useSettingsStore } from '@/stores/settings'
import { navigationService } from '@/services/navigation'
@ -319,6 +319,16 @@ export const getGraphLabels = async (): Promise<string[]> => {
return response.data
}
/**
 * Fetch the most-connected entity labels from the backend
 * (GET /graph/label/popular), ranked by node degree.
 */
export const getPopularLabels = async (limit: number = popularLabelsDefaultLimit): Promise<string[]> => {
  const url = `/graph/label/popular?limit=${limit}`
  const { data } = await axiosInstance.get(url)
  return data
}
/**
 * Server-side fuzzy label search (GET /graph/label/search).
 * The query is URI-encoded so special characters survive the URL.
 */
export const searchLabels = async (query: string, limit: number = searchLabelsDefaultLimit): Promise<string[]> => {
  const encodedQuery = encodeURIComponent(query)
  const { data } = await axiosInstance.get(`/graph/label/search?q=${encodedQuery}&limit=${limit}`)
  return data
}
export const checkHealth = async (): Promise<
LightragStatus | { status: 'error'; message: string }
> => {

View file

@ -2,124 +2,101 @@ import { useCallback, useEffect } from 'react'
import { AsyncSelect } from '@/components/ui/AsyncSelect'
import { useSettingsStore } from '@/stores/settings'
import { useGraphStore } from '@/stores/graph'
import { labelListLimit, controlButtonVariant } from '@/lib/constants'
import MiniSearch from 'minisearch'
import {
dropdownDisplayLimit,
controlButtonVariant,
popularLabelsDefaultLimit,
searchLabelsDefaultLimit
} from '@/lib/constants'
import { useTranslation } from 'react-i18next'
import { RefreshCw } from 'lucide-react'
import Button from '@/components/ui/Button'
import { SearchHistoryManager } from '@/utils/SearchHistoryManager'
import { getPopularLabels, searchLabels } from '@/api/lightrag'
const GraphLabels = () => {
const { t } = useTranslation()
const label = useSettingsStore.use.queryLabel()
const allDatabaseLabels = useGraphStore.use.allDatabaseLabels()
const labelsFetchAttempted = useGraphStore.use.labelsFetchAttempted()
// Remove initial label fetch effect as it's now handled by fetchGraph based on lastSuccessfulQueryLabel
// Initialize search history on component mount
useEffect(() => {
const initializeHistory = async () => {
const history = SearchHistoryManager.getHistory()
const getSearchEngine = useCallback(() => {
// Create search engine
const searchEngine = new MiniSearch({
idField: 'id',
fields: ['value'],
searchOptions: {
prefix: true,
fuzzy: 0.2,
boost: {
label: 2
if (history.length === 0) {
// If no history exists, fetch popular labels and initialize
try {
const popularLabels = await getPopularLabels(popularLabelsDefaultLimit)
await SearchHistoryManager.initializeWithDefaults(popularLabels)
} catch (error) {
console.error('Failed to initialize search history:', error)
// No fallback needed, API is the source of truth
}
}
})
// Add documents
const documents = allDatabaseLabels.map((str, index) => ({ id: index, value: str }))
searchEngine.addAll(documents)
return {
labels: allDatabaseLabels,
searchEngine
}
}, [allDatabaseLabels])
initializeHistory()
}, [])
const fetchData = useCallback(
async (query?: string): Promise<string[]> => {
const { labels, searchEngine } = getSearchEngine()
let results: string[] = [];
if (!query || query.trim() === '' || query.trim() === '*') {
// Empty query: return search history
results = SearchHistoryManager.getHistoryLabels(dropdownDisplayLimit)
} else {
// Non-empty query: call backend search API
try {
const apiResults = await searchLabels(query.trim(), searchLabelsDefaultLimit)
results = apiResults.length <= dropdownDisplayLimit
? apiResults
: [...apiResults.slice(0, dropdownDisplayLimit), '...']
} catch (error) {
console.error('Search API failed, falling back to local history search:', error)
let result: string[] = labels
if (query) {
// Search labels using MiniSearch
result = searchEngine.search(query).map((r: { id: number }) => labels[r.id])
// Add middle-content matching if results are few
// This enables matching content in the middle of text, not just from the beginning
if (result.length < 15) {
// Get already matched labels to avoid duplicates
const matchedLabels = new Set(result)
// Perform middle-content matching on all labels
const middleMatchResults = labels.filter(label => {
// Skip already matched labels
if (matchedLabels.has(label)) return false
// Match if label contains query string but doesn't start with it
return label &&
typeof label === 'string' &&
!label.toLowerCase().startsWith(query.toLowerCase()) &&
label.toLowerCase().includes(query.toLowerCase())
})
// Merge results
result = [...result, ...middleMatchResults]
// Fallback to local history search
const history = SearchHistoryManager.getHistory()
const queryLower = query.toLowerCase().trim()
results = history
.filter(item => item.label.toLowerCase().includes(queryLower))
.map(item => item.label)
.slice(0, dropdownDisplayLimit)
}
}
return result.length <= labelListLimit
? result
: [...result.slice(0, labelListLimit), '...']
// Always show '*' at the top, and remove duplicates
const finalResults = ['*', ...results.filter(label => label !== '*')];
return finalResults;
},
[getSearchEngine]
[]
)
// Validate label
useEffect(() => {
const handleRefresh = useCallback(async () => {
// Clear search history
SearchHistoryManager.clearHistory()
if (labelsFetchAttempted) {
if (allDatabaseLabels.length > 1) {
if (label && label !== '*' && !allDatabaseLabels.includes(label)) {
console.log(`Label "${label}" not in available labels, setting to "*"`);
useSettingsStore.getState().setQueryLabel('*');
} else {
console.log(`Label "${label}" is valid`);
}
} else if (label && allDatabaseLabels.length <= 1 && label && label !== '*') {
console.log('Available labels list is empty, setting label to empty');
useSettingsStore.getState().setQueryLabel('');
}
useGraphStore.getState().setLabelsFetchAttempted(false)
// Reinitialize with popular labels
try {
const popularLabels = await getPopularLabels(popularLabelsDefaultLimit)
await SearchHistoryManager.initializeWithDefaults(popularLabels)
} catch (error) {
console.error('Failed to reload popular labels:', error)
// No fallback needed
}
}, [allDatabaseLabels, label, labelsFetchAttempted]);
const handleRefresh = useCallback(() => {
// Reset fetch status flags
// Reset fetch status flags to trigger UI refresh
useGraphStore.getState().setLabelsFetchAttempted(false)
useGraphStore.getState().setGraphDataFetchAttempted(false)
// Clear last successful query label to ensure labels are fetched
// Clear last successful query label to ensure labels are fetched,
// which is the key to forcing a data refresh.
useGraphStore.getState().setLastSuccessfulQueryLabel('')
// Get current label
const currentLabel = useSettingsStore.getState().queryLabel
// Reset to default label to ensure consistency
useSettingsStore.getState().setQueryLabel('*')
// If current label is empty, use default label '*'
if (!currentLabel) {
useSettingsStore.getState().setQueryLabel('*')
} else {
// Trigger data reload
useSettingsStore.getState().setQueryLabel('')
setTimeout(() => {
useSettingsStore.getState().setQueryLabel(currentLabel)
}, 0)
}
// Force a data refresh by incrementing the version counter in the graph store.
// This is the reliable way to trigger a re-fetch of the graph data.
useGraphStore.getState().incrementGraphDataVersion()
}, []);
return (
@ -160,6 +137,11 @@ const GraphLabels = () => {
newLabel = '*';
}
// Add selected label to search history (except for special cases)
if (newLabel && newLabel !== '*' && newLabel !== '...' && newLabel.trim() !== '') {
SearchHistoryManager.addToHistory(newLabel);
}
// Reset graphDataFetchAttempted flag to ensure data fetch is triggered
useGraphStore.getState().setGraphDataFetchAttempted(false);
@ -167,6 +149,7 @@ const GraphLabels = () => {
useSettingsStore.getState().setQueryLabel(newLabel);
}}
clearable={false} // Prevent clearing value on reselect
debounceTime={500}
/>
</div>
)

View file

@ -419,7 +419,7 @@ export default function QuerySettings() {
className="mr-1 cursor-pointer"
id="only_need_context"
checked={querySettings.only_need_context}
onCheckedChange={(checked) => {
onCheckedChange={(checked) => {
handleChange('only_need_context', checked)
if (checked) {
handleChange('only_need_prompt', false)

View file

@ -63,6 +63,8 @@ export interface AsyncSelectProps<T> {
triggerTooltip?: string
/** Allow clearing the selection */
clearable?: boolean
/** Debounce time in milliseconds */
debounceTime?: number
}
export function AsyncSelect<T>({
@ -84,7 +86,8 @@ export function AsyncSelect<T>({
searchInputClassName,
noResultsMessage,
triggerTooltip,
clearable = true
clearable = true,
debounceTime = 150
}: AsyncSelectProps<T>) {
const [mounted, setMounted] = useState(false)
const [open, setOpen] = useState(false)
@ -94,7 +97,7 @@ export function AsyncSelect<T>({
const [selectedValue, setSelectedValue] = useState(value)
const [selectedOption, setSelectedOption] = useState<T | null>(null)
const [searchTerm, setSearchTerm] = useState('')
const debouncedSearchTerm = useDebounce(searchTerm, preload ? 0 : 150)
const debouncedSearchTerm = useDebounce(searchTerm, preload ? 0 : debounceTime)
const [originalOptions, setOriginalOptions] = useState<T[]>([])
const [initialValueDisplay, setInitialValueDisplay] = useState<React.ReactNode | null>(null)
@ -130,8 +133,8 @@ export function AsyncSelect<T>({
try {
setLoading(true)
setError(null)
// If we have a value, use it for the initial search
const data = await fetcher(value)
// Always use empty query for initial load to show search history
const data = await fetcher('')
setOriginalOptions(data)
setOptions(data)
} catch (err) {
@ -144,7 +147,7 @@ export function AsyncSelect<T>({
if (!mounted) {
initializeOptions()
}
}, [mounted, fetcher, value])
}, [mounted, fetcher])
useEffect(() => {
const fetchOptions = async () => {
@ -245,21 +248,18 @@ export function AsyncSelect<T>({
</CommandEmpty>
))}
<CommandGroup>
{options.map((option, index) => {
{options.map((option) => {
const optionValue = getOptionValue(option);
// Use index as a safe value that won't be trimmed by cmdk
const safeValue = `option-${index}-${optionValue.length}`;
// Fix cmdk filtering issue: use empty string when search is empty
// This ensures all items are shown when searchTerm is empty
const itemValue = searchTerm.trim() === '' ? '' : optionValue;
return (
<CommandItem
key={optionValue}
value={safeValue}
onSelect={(selectedSafeValue) => {
// Extract the original value from the safe value
const selectedIndex = parseInt(selectedSafeValue.split('-')[1]);
const originalValue = getOptionValue(options[selectedIndex]);
console.log(`CommandItem onSelect: safeValue='${selectedSafeValue}', originalValue='${originalValue}' (length: ${originalValue.length})`);
handleSelect(originalValue);
value={itemValue}
onSelect={() => {
handleSelect(optionValue);
}}
className="truncate"
>

View file

@ -225,18 +225,6 @@ export type EdgeType = {
const fetchGraph = async (label: string, maxDepth: number, maxNodes: number) => {
let rawData: any = null;
// Check if we need to fetch all database labels first
const lastSuccessfulQueryLabel = useGraphStore.getState().lastSuccessfulQueryLabel;
if (!lastSuccessfulQueryLabel) {
console.log('Last successful queryLabel is empty');
try {
await useGraphStore.getState().fetchAllDatabaseLabels();
} catch (e) {
console.error('Failed to fetch all database labels:', e);
// Continue with graph fetch even if labels fetch fails
}
}
// Trigger GraphLabels component to check if the label is valid
// console.log('Setting labelsFetchAttempted to true');
useGraphStore.getState().setLabelsFetchAttempted(true)
@ -411,6 +399,7 @@ const useLightrangeGraph = () => {
const isFetching = useGraphStore.use.isFetching()
const nodeToExpand = useGraphStore.use.nodeToExpand()
const nodeToPrune = useGraphStore.use.nodeToPrune()
const graphDataVersion = useGraphStore.use.graphDataVersion()
// Use ref to track if data has been loaded and initial load
@ -597,7 +586,7 @@ const useLightrangeGraph = () => {
state.setLastSuccessfulQueryLabel('') // Clear last successful query label on error
})
}
}, [queryLabel, maxQueryDepth, maxNodes, isFetching, t])
}, [queryLabel, maxQueryDepth, maxNodes, isFetching, t, graphDataVersion])
// Handle node expansion
useEffect(() => {

View file

@ -20,6 +20,17 @@ export const edgeColorHighlighted = '#B2EBF2'
export const searchResultLimit = 50
export const labelListLimit = 100
// Search History Configuration
export const searchHistoryMaxItems = 500
export const searchHistoryVersion = '1.0'
// API Request Limits
export const popularLabelsDefaultLimit = 300
export const searchLabelsDefaultLimit = 50
// UI Display Limits
export const dropdownDisplayLimit = 300
export const minNodeSize = 4
export const maxNodeSize = 20

View file

@ -336,7 +336,7 @@
"label": "التسمية",
"placeholder": "ابحث في التسميات...",
"andOthers": "و {{count}} آخرون",
"refreshTooltip": "إعادة تحميل البيانات (بعد إضافة الملف)"
"refreshTooltip": "إعادة تعيين بيانات الرسم البياني وسجل البحث (مطلوب بعد تغيير المستندات)"
},
"emptyGraph": "فارغ (حاول إعادة التحميل)"
},

View file

@ -336,7 +336,7 @@
"label": "Label",
"placeholder": "Search labels...",
"andOthers": "And {count} others",
"refreshTooltip": "Reload data(After file added)"
"refreshTooltip": "Reset graph data and search history (required after document changes)"
},
"emptyGraph": "Empty(Try Reload Again)"
},

View file

@ -336,7 +336,7 @@
"label": "Étiquette",
"placeholder": "Rechercher des étiquettes...",
"andOthers": "Et {{count}} autres",
"refreshTooltip": "Recharger les données (Après l'ajout de fichier)"
"refreshTooltip": "Réinitialiser les données du graphe et l'historique de recherche (requis après modification des documents)"
},
"emptyGraph": "Vide (Essayez de recharger)"
},

View file

@ -336,7 +336,7 @@
"label": "标签",
"placeholder": "搜索标签...",
"andOthers": "还有 {count} 个",
"refreshTooltip": "重载图形数据(添加文件后需重载)"
"refreshTooltip": "重置图形数据和搜索历史(文档变化后需重置)"
},
"emptyGraph": "无数据(请重载图形数据)"
},

View file

@ -336,7 +336,7 @@
"label": "標籤",
"placeholder": "搜尋標籤...",
"andOthers": "還有 {count} 個",
"refreshTooltip": "重載圖形數據(新增檔案後需重載)"
"refreshTooltip": "重設圖形資料和搜尋歷史(文件變化後需重設)"
},
"emptyGraph": "無數據(請重載圖形數據)"
},

View file

@ -1,7 +1,6 @@
import { create } from 'zustand'
import { createSelectors } from '@/lib/utils'
import { DirectedGraph } from 'graphology'
import { getGraphLabels } from '@/api/lightrag'
import MiniSearch from 'minisearch'
export type RawNodeType = {
@ -84,7 +83,6 @@ interface GraphState {
rawGraph: RawGraph | null
sigmaGraph: DirectedGraph | null
sigmaInstance: any | null
allDatabaseLabels: string[]
searchEngine: MiniSearch | null
@ -113,8 +111,6 @@ interface GraphState {
setRawGraph: (rawGraph: RawGraph | null) => void
setSigmaGraph: (sigmaGraph: DirectedGraph | null) => void
setAllDatabaseLabels: (labels: string[]) => void
fetchAllDatabaseLabels: () => Promise<void>
setIsFetching: (isFetching: boolean) => void
// 搜索引擎方法
@ -160,7 +156,6 @@ const useGraphStoreBase = create<GraphState>()((set, get) => ({
rawGraph: null,
sigmaGraph: null,
sigmaInstance: null,
allDatabaseLabels: ['*'],
typeColorMap: new Map<string, string>(),
@ -207,21 +202,6 @@ const useGraphStoreBase = create<GraphState>()((set, get) => ({
set({ sigmaGraph });
},
setAllDatabaseLabels: (labels: string[]) => set({ allDatabaseLabels: labels }),
fetchAllDatabaseLabels: async () => {
try {
console.log('Fetching all database labels...');
const labels = await getGraphLabels();
set({ allDatabaseLabels: ['*', ...labels] });
return;
} catch (error) {
console.error('Failed to fetch all database labels:', error);
set({ allDatabaseLabels: ['*'] });
throw error;
}
},
setMoveToSelectedNode: (moveToSelectedNode?: boolean) => set({ moveToSelectedNode }),
setSigmaInstance: (instance: any) => set({ sigmaInstance: instance }),

View file

@ -0,0 +1,259 @@
import { searchHistoryMaxItems, searchHistoryVersion } from '@/lib/constants'
/**
* SearchHistoryManager - Manages search history persistence in localStorage
*
* This utility class handles:
* - Storing and retrieving search history from localStorage
* - Managing history size limits
* - Sorting by access time and frequency
* - Version compatibility
*/
/** One persisted search-history entry. */
export interface SearchHistoryItem {
  label: string // Label name (stored trimmed)
  lastAccessed: number // Last access timestamp, epoch ms from Date.now()
  accessCount: number // Times searched; 0 marks a seeded popular-label default (see initializeWithDefaults)
}

/** Payload shape written to localStorage under the history storage key. */
export interface SearchHistoryData {
  items: SearchHistoryItem[]
  version: string // Data version for compatibility; mismatched data is discarded on read
  workspace?: string // Workspace isolation (if needed) — not currently written by SearchHistoryManager
}
/** localStorage-backed store for label search history (capped, recency-ordered, versioned). */
export class SearchHistoryManager {
  private static readonly STORAGE_KEY = 'lightrag_search_history'
  private static readonly MAX_HISTORY = searchHistoryMaxItems
  private static readonly VERSION = searchHistoryVersion

  /**
   * Serialize `items` under the current data version and write to localStorage.
   * Storage errors propagate to the caller.
   */
  private static persist(items: SearchHistoryItem[]): void {
    const payload: SearchHistoryData = {
      items,
      version: this.VERSION
    }
    localStorage.setItem(this.STORAGE_KEY, JSON.stringify(payload))
  }

  /**
   * Read search history from localStorage.
   * Corrupt, unparsable, or version-mismatched data is cleared and treated as empty.
   * @returns Items sorted by last accessed time (descending), ties broken by access count (descending)
   */
  static getHistory(): SearchHistoryItem[] {
    try {
      const raw = localStorage.getItem(this.STORAGE_KEY)
      if (!raw) return []

      const stored: SearchHistoryData = JSON.parse(raw)

      // Data written by a different version is not trusted; drop it entirely.
      if (stored.version !== this.VERSION) {
        console.warn(`Search history version mismatch. Expected ${this.VERSION}, got ${stored.version}. Clearing history.`)
        this.clearHistory()
        return []
      }

      // Defend against hand-edited or otherwise malformed payloads.
      if (!Array.isArray(stored.items)) {
        console.warn('Invalid search history format. Clearing history.')
        this.clearHistory()
        return []
      }

      // Most recently accessed first; equal timestamps fall back to usage count.
      return [...stored.items].sort((a, b) =>
        b.lastAccessed !== a.lastAccessed
          ? b.lastAccessed - a.lastAccessed
          : (b.accessCount || 0) - (a.accessCount || 0)
      )
    } catch (error) {
      console.error('Error reading search history:', error)
      this.clearHistory()
      return []
    }
  }

  /**
   * Record a label search: bump an existing entry's count and timestamp,
   * or insert a new entry at the front. The list is truncated to MAX_HISTORY.
   * @param label Label to add to history (ignored if blank or not a string)
   */
  static addToHistory(label: string): void {
    if (typeof label !== 'string') return
    const name = label.trim()
    if (name === '') return

    try {
      const items = this.getHistory()
      const now = Date.now()
      const index = items.findIndex((entry) => entry.label === name)

      let updated: SearchHistoryItem
      if (index === -1) {
        // First time this label is searched.
        updated = { label: name, lastAccessed: now, accessCount: 1 }
      } else {
        // Pull the existing entry out and refresh its stats.
        const [previous] = items.splice(index, 1)
        updated = {
          label: previous.label,
          lastAccessed: now,
          accessCount: (previous.accessCount || 0) + 1
        }
      }

      items.unshift(updated)
      this.persist(items.slice(0, this.MAX_HISTORY))
    } catch (error) {
      console.error('Error saving search history:', error)
    }
  }

  /** Remove all persisted search history. */
  static clearHistory(): void {
    try {
      localStorage.removeItem(this.STORAGE_KEY)
    } catch (error) {
      console.error('Error clearing search history:', error)
    }
  }

  /**
   * Seed an empty history with server-provided popular labels.
   * Seeded entries carry accessCount 0 so they can be told apart from real searches.
   * No-op when history already has entries or no labels were given.
   * @param popularLabels Labels to use as defaults, in popularity order
   */
  static async initializeWithDefaults(popularLabels: string[]): Promise<void> {
    if (this.getHistory().length > 0 || popularLabels.length === 0) return

    try {
      const base = Date.now()
      // Decreasing timestamps keep the given popularity order after sorting.
      this.persist(
        popularLabels.map((name, position) => ({
          label: name.trim(),
          lastAccessed: base - position,
          accessCount: 0
        }))
      )
    } catch (error) {
      console.error('Error initializing search history with defaults:', error)
    }
  }

  /**
   * Get entries the user actually searched for (accessCount > 0).
   * @param limit Maximum number of recent searches to return (default 10)
   */
  static getRecentSearches(limit: number = 10): SearchHistoryItem[] {
    return this.getHistory()
      .filter((entry) => entry.accessCount > 0)
      .slice(0, limit)
  }

  /**
   * Get seeded popular-label entries (accessCount === 0).
   * @param limit Optional cap on the number of items returned
   */
  static getPopularRecommendations(limit?: number): SearchHistoryItem[] {
    const seeded = this.getHistory().filter((entry) => entry.accessCount === 0)
    return limit ? seeded.slice(0, limit) : seeded
  }

  /**
   * Get all history labels as plain strings, in history order.
   * @param limit Optional cap on the number of labels returned
   */
  static getHistoryLabels(limit?: number): string[] {
    const names = this.getHistory().map((entry) => entry.label)
    return limit ? names.slice(0, limit) : names
  }

  /**
   * Check whether a label (trimmed) is present in history.
   * @returns True if found; false for missing, blank, or non-string input
   */
  static hasLabel(label: string): boolean {
    if (!label || typeof label !== 'string') return false
    const target = label.trim()
    return this.getHistory().some((entry) => entry.label === target)
  }

  /**
   * Remove one label (trimmed match) from history.
   * Storage is only rewritten when an entry was actually removed.
   */
  static removeLabel(label: string): void {
    if (!label || typeof label !== 'string') return

    try {
      const items = this.getHistory()
      const target = label.trim()
      const remaining = items.filter((entry) => entry.label !== target)

      if (remaining.length !== items.length) {
        this.persist(remaining)
      }
    } catch (error) {
      console.error('Error removing label from search history:', error)
    }
  }

  /**
   * Summarize the stored history.
   * @returns Counts of total/recent/seeded entries plus the raw stored string length
   */
  static getStats(): {
    totalItems: number
    recentSearches: number
    popularRecommendations: number
    storageSize: number
  } {
    const items = this.getHistory()

    let storageSize = 0
    try {
      storageSize = localStorage.getItem(this.STORAGE_KEY)?.length ?? 0
    } catch {
      // Ignore error
    }

    return {
      totalItems: items.length,
      recentSearches: items.filter((entry) => entry.accessCount > 0).length,
      popularRecommendations: items.filter((entry) => entry.accessCount === 0).length,
      storageSize
    }
  }
}