cognee/cognee-mcp/pyproject.toml
Igor Ilic 2323fd0c94
feat: Add gemini ollama support for cognee-mcp [COG-1408] (#583)

## Description
Add Gemini and Ollama support to cognee-mcp.
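
The packaging change below installs cognee with the `gemini` and `huggingface` extras, so the MCP server can talk to Gemini or a local Ollama instance once a provider is selected at runtime. A minimal sketch of that selection via environment variables; the variable names used here (`LLM_PROVIDER`, `LLM_MODEL`, `LLM_API_KEY`, `LLM_ENDPOINT`) are assumptions for illustration and do not appear in this diff:

```python
# Hypothetical provider selection for the cognee MCP server.
# Variable names are assumed for illustration, not taken from this PR.
import os

# Hosted Gemini: choose the provider and supply an API key.
os.environ["LLM_PROVIDER"] = "gemini"
os.environ["LLM_API_KEY"] = "<gemini-api-key>"

# Local Ollama instead: point at a running Ollama endpoint.
# os.environ["LLM_PROVIDER"] = "ollama"
# os.environ["LLM_MODEL"] = "llama3.1"
# os.environ["LLM_ENDPOINT"] = "http://localhost:11434/v1"
```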

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.


## Summary by CodeRabbit

- **New Features**
  - Added Gemini and Ollama support to the cognee-mcp server by installing cognee with additional provider extras.
2025-03-01 19:51:48 +01:00


[project]
name = "cognee-mcp"
version = "0.1.0"
description = "A MCP server project"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "cognee[codegraph,gemini,huggingface]",
    "mcp==1.2.1",
]

[[project.authors]]
name = "Rita Aleksziev"
email = "rita@topoteretes.com"

[build-system]
requires = [ "hatchling", ]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src"]

[dependency-groups]
dev = [
    "debugpy>=1.8.12",
]

[project.scripts]
cognee = "src:main"
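
`[project.scripts]` maps the `cognee` console command to a `main` callable exported by the `src` package. The real cognee-mcp server code is not shown here; the following is only a minimal sketch of what such an entry point could look like with the pinned `mcp==1.2.1` SDK, using its `FastMCP` helper and a placeholder tool:

```python
# src/__init__.py -- hypothetical minimal entry point; the actual
# cognee-mcp server registers cognee operations (such as cognify and
# search) as MCP tools instead of the placeholder below.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("cognee")


@mcp.tool()
def ping() -> str:
    """Placeholder tool so the server exposes something callable."""
    return "pong"


def main() -> None:
    # Serve over stdio, the transport MCP clients typically use to
    # launch a local server process.
    mcp.run(transport="stdio")
```

Once the package is installed, the script entry puts a `cognee` command on PATH that invokes `src.main()`, so an MCP client can start the server as a subprocess.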