23 KiB
Quickstart
Get started with Cognee quickly and efficiently
export const GraphVisualization = () => {
const svgRef = useRef(null);
const tooltipRef = useRef(null);
useEffect(() => {
const loadD3 = async () => {
  // SSR guard: `window`, the DOM and d3 only exist in the browser.
  if (typeof window === 'undefined') return;
  if (!window.d3) {
    // Inject d3 v5 from the CDN on first use.
    const script = document.createElement('script');
    script.src = 'https://d3js.org/d3.v5.min.js';
    script.async = true;
    document.body.appendChild(script);
    // Wait for the script to settle. Wiring only `onload` would leave this
    // promise pending forever when the CDN is unreachable, so resolve on
    // error too and bail out instead of calling into a missing `window.d3`.
    const loaded = await new Promise(resolve => {
      script.onload = () => resolve(true);
      script.onerror = () => resolve(false);
    });
    if (!loaded) return;
  }
  initializeGraph();
};
const initializeGraph = () => {
  const d3 = window.d3;
  // Refs can be gone if the component unmounted while the script loaded.
  if (!d3 || !svgRef.current || !tooltipRef.current) return;

  // --- Static graph data exported from a cognee run. `id` is the node UUID
  // the link force resolves against; `color` encodes the node type. ---
  const nodes = [
    { name: "", type: "DocumentChunk", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["text"] }, belongs_to_set: null, text: "Cognee turns documents into AI memory.", chunk_size: 14, chunk_index: 0, cut_type: "sentence_end", id: "9e42824c-569a-5b89-808f-8b8ece56476e", color: "#801212" },
    { name: "cognee", type: "Entity", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["name"] }, belongs_to_set: null, description: "Tool that turns documents into AI memory.", id: "2d09f34b-aee1-5d32-a2f9-23eb7ed48b24", color: "#f47710" },
    { name: "product", type: "EntityType", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["name"] }, belongs_to_set: null, description: "product", id: "379107f4-bd9d-5823-990f-cc4becfeccfc", color: "#6510f4" },
    { name: "documents", type: "Entity", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["name"] }, belongs_to_set: null, description: "Source materials processed by Cognee.", id: "f4653eeb-de7f-51f4-b5ab-516f8d3bf5e7", color: "#f47710" },
    { name: "concept", type: "EntityType", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["name"] }, belongs_to_set: null, description: "concept", id: "dd9713b7-dc20-5101-aad0-1c4216811147", color: "#6510f4" },
    { name: "ai memory", type: "Entity", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["name"] }, belongs_to_set: null, description: "Representation of documents as memory usable by AI.", id: "af397680-8867-5ec5-9ba9-dddd39dfd7a8", color: "#f47710" },
    { name: "text_document", type: "TextDocument", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["name"] }, belongs_to_set: null, raw_data_location: "", external_metadata: "{}", mime_type: "text/plain", id: "d08449e0-0ce8-52e9-97ab-44e1e8efc4a7", color: "#D3D3D3" },
    { name: "", type: "TextSummary", ontology_valid: false, version: 1, topological_rank: 0, metadata: { index_fields: ["text"] }, belongs_to_set: null, text: "Cognee transforms documents into AI memory.", id: "59c04417-3fad-53dd-a7b5-dd2ce28dd2dc", color: "#1077f4" }
  ];

  // All links in this dataset are unweighted; most share the same edge_info
  // shape, so build them through a helper. A custom `edgeInfo` overrides the
  // default (used by the one edge that carries `ontology_valid` instead of
  // `updated_at`).
  const UPDATED_AT = "2025-12-04 08:54:17";
  const makeLink = (source, target, relation, edgeInfo) => ({
    source,
    target,
    relation,
    weight: null,
    all_weights: {},
    relationship_type: null,
    edge_info: edgeInfo || {
      source_node_id: source,
      target_node_id: target,
      relationship_name: relation,
      updated_at: UPDATED_AT
    }
  });
  const links = [
    makeLink("9e42824c-569a-5b89-808f-8b8ece56476e", "2d09f34b-aee1-5d32-a2f9-23eb7ed48b24", "contains"),
    makeLink("9e42824c-569a-5b89-808f-8b8ece56476e", "f4653eeb-de7f-51f4-b5ab-516f8d3bf5e7", "contains"),
    makeLink("9e42824c-569a-5b89-808f-8b8ece56476e", "af397680-8867-5ec5-9ba9-dddd39dfd7a8", "contains"),
    makeLink("9e42824c-569a-5b89-808f-8b8ece56476e", "d08449e0-0ce8-52e9-97ab-44e1e8efc4a7", "is_part_of"),
    makeLink("2d09f34b-aee1-5d32-a2f9-23eb7ed48b24", "379107f4-bd9d-5823-990f-cc4becfeccfc", "is_a"),
    makeLink("2d09f34b-aee1-5d32-a2f9-23eb7ed48b24", "af397680-8867-5ec5-9ba9-dddd39dfd7a8", "converts_documents_into", {
      relationship_name: "converts_documents_into",
      source_node_id: "2d09f34b-aee1-5d32-a2f9-23eb7ed48b24",
      target_node_id: "af397680-8867-5ec5-9ba9-dddd39dfd7a8",
      ontology_valid: false
    }),
    makeLink("f4653eeb-de7f-51f4-b5ab-516f8d3bf5e7", "dd9713b7-dc20-5101-aad0-1c4216811147", "is_a"),
    makeLink("af397680-8867-5ec5-9ba9-dddd39dfd7a8", "dd9713b7-dc20-5101-aad0-1c4216811147", "is_a"),
    makeLink("59c04417-3fad-53dd-a7b5-dd2ce28dd2dc", "9e42824c-569a-5b89-808f-8b8ece56476e", "made_from")
  ];

  // Re-render from scratch: the effect can run more than once (e.g. React
  // strict mode), and stale SVG children would otherwise accumulate.
  const svg = d3.select(svgRef.current);
  svg.selectAll("*").remove();
  const width = 800;
  const height = 600;
  svg.attr("width", width).attr("height", height);
  const container = svg.append("g"); // single group so zoom/pan transforms everything
  const tooltip = d3.select(tooltipRef.current);

  // Force layout: weak link springs plus centering x/y forces keep this
  // small graph clustered near the middle of the viewport.
  const simulation = d3.forceSimulation(nodes)
    .force("link", d3.forceLink(links).id(d => d.id).strength(0.1))
    .force("charge", d3.forceManyBody().strength(-275))
    .force("center", d3.forceCenter(width / 2, height / 2))
    .force("x", d3.forceX().strength(0.1).x(width / 2))
    .force("y", d3.forceY().strength(0.1).y(height / 2));

  // Stroke width scales with the (average) weight when one exists; this
  // dataset has no weights, so every edge falls through to 2px.
  const linkWidth = d => {
    if (d.weight) return Math.max(2, d.weight * 5);
    if (d.all_weights && Object.keys(d.all_weights).length > 0) {
      const values = Object.values(d.all_weights);
      const avgWeight = values.reduce((a, b) => a + b, 0) / values.length;
      return Math.max(2, avgWeight * 5);
    }
    return 2;
  };
  const linkClass = d => {
    if (d.all_weights && Object.keys(d.all_weights).length > 1) return "multi-weighted";
    if (d.weight || d.all_weights && Object.keys(d.all_weights).length > 0) return "weighted";
    return "";
  };

  // Build the hover tooltip body for an edge.
  // NOTE(review): "\n" does not render as a line break inside .html();
  // the author may have intended "<br/>" — kept as plain newlines to
  // preserve the original runtime strings.
  const edgeTooltip = d => {
    let content = "Edge Information\n";
    content += "Relationship: " + d.relation + "\n";
    if (d.all_weights && Object.keys(d.all_weights).length > 0) {
      content += "Weights:\n";
      Object.keys(d.all_weights).forEach(function (weightName) {
        content += " " + weightName + ": " + d.all_weights[weightName] + "\n";
      });
    } else if (d.weight !== null && d.weight !== undefined) {
      content += "Weight: " + d.weight + "\n";
    }
    if (d.relationship_type) {
      content += "Type: " + d.relationship_type + "\n";
    }
    if (d.edge_info) {
      // Append only the "extra" edge metadata: skip fields already shown
      // above and internal bookkeeping keys.
      const hiddenKeys = ['weight', 'weights', 'relationship_type', 'source_node_id', 'target_node_id', 'relationship_name', 'updated_at'];
      Object.keys(d.edge_info).forEach(function (key) {
        if (!hiddenKeys.includes(key) && !key.startsWith('weight_')) {
          content += key + ": " + d.edge_info[key] + "\n";
        }
      });
    }
    return content;
  };

  const link = container.append("g").attr("class", "links")
    .selectAll("line").data(links).enter().append("line")
    .attr("stroke-width", linkWidth)
    .attr("class", linkClass)
    .on("mouseover", function (d) {
      // d3 v5 handler signature: the datum is the argument, the DOM event
      // lives on d3.event (changed in v6+).
      tooltip.html(edgeTooltip(d))
        .style("left", d3.event.pageX + 10 + "px")
        .style("top", d3.event.pageY - 10 + "px")
        .style("opacity", 1);
    })
    .on("mouseout", function () {
      tooltip.style("opacity", 0);
    });

  // Mid-edge labels: relation name, plus a weight summary when present.
  const edgeLabels = container.append("g").attr("class", "edge-labels")
    .selectAll("text").data(links).enter().append("text")
    .attr("class", "edge-label")
    .text(d => {
      let label = d.relation;
      if (d.all_weights && Object.keys(d.all_weights).length > 1) {
        label += " (" + Object.keys(d.all_weights).length + " weights)";
      } else if (d.weight) {
        label += " (" + d.weight + ")";
      } else if (d.all_weights && Object.keys(d.all_weights).length === 1) {
        const singleWeight = Object.values(d.all_weights)[0];
        label += " (" + singleWeight + ")";
      }
      return label;
    });

  // One <g> per node: a draggable circle plus a centered name label.
  const nodeGroup = container.append("g").attr("class", "nodes")
    .selectAll("g").data(nodes).enter().append("g");
  const node = nodeGroup.append("circle")
    .attr("r", 13)
    .attr("fill", d => d.color)
    .call(d3.drag().on("start", dragstarted).on("drag", dragged).on("end", dragended));
  nodeGroup.append("text")
    .attr("class", "node-label")
    .attr("dy", 4)
    .attr("text-anchor", "middle")
    .text(d => d.name);
  // Native browser tooltip carrying the full node payload.
  node.append("title").text(d => JSON.stringify(d));

  // Re-position every rendered element on each simulation tick.
  simulation.on("tick", function () {
    link.attr("x1", d => d.source.x).attr("y1", d => d.source.y)
      .attr("x2", d => d.target.x).attr("y2", d => d.target.y);
    edgeLabels.attr("x", d => (d.source.x + d.target.x) / 2)
      .attr("y", d => (d.source.y + d.target.y) / 2 - 5);
    node.attr("cx", d => d.x).attr("cy", d => d.y);
    nodeGroup.select("text").attr("x", d => d.x).attr("y", d => d.y).attr("dy", 4).attr("text-anchor", "middle");
  });

  // Zoom/pan the whole container group.
  svg.call(d3.zoom().on("zoom", function () {
    container.attr("transform", d3.event.transform);
  }));

  // Drag handlers (d3 v5): pinning fx/fy fixes a node while dragging;
  // clearing them on release lets the simulation take over again.
  function dragstarted(d) {
    if (!d3.event.active) simulation.alphaTarget(0.3).restart();
    d.fx = d.x;
    d.fy = d.y;
  }
  function dragged(d) {
    d.fx = d3.event.x;
    d.fy = d3.event.y;
  }
  function dragended(d) {
    if (!d3.event.active) simulation.alphaTarget(0);
    d.fx = null;
    d.fy = null;
  }
};
loadD3();
}, []);
return <div style={{
position: 'relative',
width: '100%',
height: '600px'
}}>
<svg ref={svgRef} style={{
width: '100%',
height: '100%',
display: 'block',
background: 'linear-gradient(90deg, #101010, #1a1a2e)'
}} />
<div ref={tooltipRef} style={{
position: 'absolute',
textAlign: 'left',
padding: '8px',
fontSize: '12px',
background: 'rgba(0, 0, 0, 0.9)',
color: 'white',
border: '1px solid rgba(255, 255, 255, 0.3)',
borderRadius: '4px',
pointerEvents: 'none',
opacity: 0,
transition: 'opacity 0.2s',
zIndex: 1000,
maxWidth: '300px',
wordWrap: 'break-word'
}} />
<svg style={{
position: 'absolute',
bottom: '10px',
right: '10px',
width: '150px',
height: 'auto',
zIndex: 9999
}} viewBox="0 0 158 44" fill="none" xmlns="http://www.w3.org/2000/svg">
After completing the installation steps successfully, run your first Cognee example to see AI memory in action.
Basic Usage
This minimal example shows how to add content, process it, and perform a search:
import cognee
import asyncio


async def main():
    # Create a clean slate for cognee -- reset data and system state
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    # Add sample content
    text = "Cognee turns documents into AI memory."
    await cognee.add(text)

    # Process with LLMs to build the knowledge graph
    await cognee.cognify()

    # Search the knowledge graph
    results = await cognee.search(
        query_text="What does Cognee do?"
    )

    # Print the results
    for result in results:
        print(result)


if __name__ == '__main__':
    asyncio.run(main())
Interactive knowledge graph visualization -- drag nodes, zoom, and hover for details. Create your own visualization with 2 additional lines of code [here](/guides/graph-visualization).
What just happened
The code demonstrates Cognee's three core operations:
.add — Adds data to Cognee so it can be cognified. In this case, we added a single string ("Cognee turns documents into AI memory."); from Cognee's perspective, this string is a document. .cognify — This is where the cognification happens. All documents are chunked, entities are extracted, relationships are made, and summaries are generated. In this case, we can expect entities like cognee, documents, and ai memory. .search — Queries the knowledge graph using vector similarity and graph traversal to find relevant information and return contextual results.
About async / await in Cognee
**Cognee uses asynchronous code extensively.** That means many of its functions are defined with `async` and must be called with `await`. This lets Python handle waiting (e.g. for I/O or network calls) without blocking the rest of your program.
This example uses async / await, Python’s way of doing asynchronous programming.
Asynchronous programming is used when functions may block because they are waiting for something (for example, a reply from an API call). By writing async def, you define a function that can pause at certain points.
The await keyword marks those calls that may need to pause.
To run such functions, Python provides the asyncio library. It uses a loop, called the event loop, which executes your code in order but, whenever a function is waiting, can temporarily run another one. From inside your function, though, everything still runs top-to-bottom: each line after an await only executes once the awaited call has finished.
* A good starting point is this [guide](https://realpython.com/async-io-python/).
* Official documentation is available [here](https://docs.python.org/3/library/asyncio.html).
Next Steps
Learn about Cognee's core concepts, architecture, building blocks, and main operations. To find navigation and other pages in this documentation, fetch the llms.txt file at: https://docs.cognee.ai/llms.txt