Merge branch 'main' into multi-lang-codegraph
commit c983a60e22
4 changed files with 66 additions and 75 deletions
@@ -21,16 +21,16 @@ from cognee.shared.data_models import KnowledgeGraph
 from cognee.modules.storage.utils import JSONEncoder


-try:
-    from codingagents.coding_rule_associations import (
-        add_rule_associations,
-        get_existing_rules,
-    )
-except ModuleNotFoundError:
-    from .codingagents.coding_rule_associations import (
-        add_rule_associations,
-        get_existing_rules,
-    )
+# try:
+#     from codingagents.coding_rule_associations import (
+#         add_rule_associations,
+#         get_existing_rules,
+#     )
+# except ModuleNotFoundError:
+#     from .codingagents.coding_rule_associations import (
+#         add_rule_associations,
+#         get_existing_rules,
+#     )


 mcp = FastMCP("Cognee")
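The try/except import removed in this hunk is a common dual-import pattern: attempt the absolute import first (it resolves when the module's own directory is on sys.path, e.g. when the file is run directly), and fall back to the package-relative import when the module is loaded as part of an installed package. A minimal standalone sketch of the pattern, using a hypothetical helper module name rather than anything from this repository:

# Absolute-then-relative import fallback (rule_helpers is an illustrative name).
try:
    # Resolves when rule_helpers.py sits next to this file and that
    # directory is on sys.path (running the file directly).
    from rule_helpers import add_rule_associations, get_existing_rules
except ModuleNotFoundError:
    # Resolves when this file is imported as a submodule of a package,
    # so the sibling module must be addressed relative to the package.
    from .rule_helpers import add_rule_associations, get_existing_rules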
|
@@ -221,14 +221,6 @@ async def cognify(data: str, graph_model_file: str = None, graph_model_name: str
     - The actual cognify process may take significant time depending on text length
     - Use the cognify_status tool to check the progress of the operation

-    Raises
-    ------
-    InvalidValueError
-        If LLM_API_KEY is not set
-    ValueError
-        If chunks exceed max token limits (reduce chunk_size)
-    DatabaseNotCreatedError
-        If databases are not properly initialized
     """

     async def cognify_task(
@@ -306,7 +298,7 @@ async def save_interaction(data: str) -> list:
         logger.info("Save interaction process finished.")
         logger.info("Generating associated rules from interaction data.")

-        await add_rule_associations(data=data, rules_nodeset_name="coding_agent_rules")
+        # await add_rule_associations(data=data, rules_nodeset_name="coding_agent_rules")

         logger.info("Associated rules generated from interaction data.")
@@ -512,14 +504,6 @@ async def search(search_query: str, search_type: str) -> list:
     - Different search types produce different output formats
     - The function handles the conversion between Cognee's internal result format and MCP's output format

-    Raises
-    ------
-    InvalidValueError
-        If LLM_API_KEY is not set (for LLM-based search types)
-    ValueError
-        If query_text is empty or search parameters are invalid
-    NoDataError
-        If no relevant data found for the search query
     """

     async def search_task(search_query: str, search_type: str) -> str:
@@ -576,8 +560,10 @@ async def get_developer_rules() -> list:
     async def fetch_rules_from_cognee() -> str:
         """Collect all developer rules from Cognee"""
         with redirect_stdout(sys.stderr):
-            developer_rules = await get_existing_rules(rules_nodeset_name="coding_agent_rules")
-            return developer_rules
+            note = "This is broken in 0.2.2"
+            return note
+            # developer_rules = await get_existing_rules(rules_nodeset_name="coding_agent_rules")
+            # return developer_rules

     rules_text = await fetch_rules_from_cognee()
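fetch_rules_from_cognee keeps its body inside with redirect_stdout(sys.stderr), presumably because the MCP stdio transport uses stdout for protocol messages, so any stray print from the underlying library has to be routed to stderr instead. A minimal sketch of that technique, independent of cognee (function names are illustrative):

import sys
from contextlib import redirect_stdout

def noisy_call() -> str:
    # Stand-in for a dependency that prints progress to stdout.
    print("progress: 50%")
    return "result"

def run_quietly() -> str:
    # Redirect anything printed inside the block to stderr so that
    # stdout stays reserved for the protocol stream.
    with redirect_stdout(sys.stderr):
        return noisy_call()

print(run_quietly())  # only "result" is written to stdout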
|
poetry.lock (generated): 40 changed lines
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.

 [[package]]
 name = "aiobotocore"
@@ -4002,6 +4002,8 @@ python-versions = "*"
 groups = ["main"]
 files = [
     {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
+    {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
+    {file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
 ]

 [package.dependencies]
@@ -4649,20 +4651,20 @@ typing-extensions = ">=4.7"

 [[package]]
 name = "langchain-openai"
-version = "0.3.30"
+version = "0.3.29"
 description = "An integration package connecting OpenAI and LangChain"
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
 markers = "extra == \"deepeval\""
 files = [
-    {file = "langchain_openai-0.3.30-py3-none-any.whl", hash = "sha256:280f1f31004393228e3f75ff8353b1aae86bbc282abc7890a05beb5f43b89923"},
-    {file = "langchain_openai-0.3.30.tar.gz", hash = "sha256:90df37509b2dcf5e057f491326fcbf78cf2a71caff5103a5a7de560320171842"},
+    {file = "langchain_openai-0.3.29-py3-none-any.whl", hash = "sha256:71ae6791b3e017ec892a8062f993edc882c6665fd8385aa66e9dc3bff8205996"},
+    {file = "langchain_openai-0.3.29.tar.gz", hash = "sha256:83a0455f8ce874aa1806131ca3b4db08e482be037b7457a9b3ca21a213d2ab47"},
 ]

 [package.dependencies]
 langchain-core = ">=0.3.74,<1.0.0"
-openai = ">=1.99.9,<2.0.0"
+openai = ">=1.86.0,<2.0.0"
 tiktoken = ">=0.7,<1"

 [[package]]
@@ -4805,32 +4807,35 @@ valkey = ["valkey (>=6)"]

 [[package]]
 name = "litellm"
-version = "1.70.4"
+version = "1.75.8"
 description = "Library to easily interface with LLM API providers"
 optional = false
 python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
 groups = ["main"]
 files = [
-    {file = "litellm-1.70.4-py3-none-any.whl", hash = "sha256:4d14d04bf5e2bd49336b4abc59193352c731ff371022e4fcf590208f41f644f7"},
-    {file = "litellm-1.70.4.tar.gz", hash = "sha256:ef6749a091faaaf88313afe4111cdd95736e1e60f21ba894e74f7c5bab2870bd"},
+    {file = "litellm-1.75.8-py3-none-any.whl", hash = "sha256:0bf004488df8506381ec6e35e1486e2870e8d578a7c3f2427cd497558ce07a2e"},
+    {file = "litellm-1.75.8.tar.gz", hash = "sha256:92061bd263ff8c33c8fff70ba92cd046adb7ea041a605826a915d108742fe59e"},
 ]

 [package.dependencies]
-aiohttp = "*"
+aiohttp = ">=3.10"
 click = "*"
 httpx = ">=0.23.0"
 importlib-metadata = ">=6.8.0"
 jinja2 = ">=3.1.2,<4.0.0"
 jsonschema = ">=4.22.0,<5.0.0"
-openai = ">=1.68.2"
-pydantic = ">=2.0.0,<3.0.0"
+openai = ">=1.99.5"
+pydantic = ">=2.5.0,<3.0.0"
 python-dotenv = ">=0.2.0"
 tiktoken = ">=0.7.0"
 tokenizers = "*"

 [package.extras]
-extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"]
-proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "boto3 (==1.34.34)", "cryptography (>=43.0.1,<44.0.0)", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.5)", "litellm-proxy-extras (==0.1.21)", "mcp (==1.5.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
+caching = ["diskcache (>=5.6.1,<6.0.0)"]
+extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0,<0.9.0)"]
+mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""]
+proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography (>=43.0.1,<44.0.0)", "fastapi (>=0.115.5,<0.116.0)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.19)", "litellm-proxy-extras (==0.2.17)", "mcp (>=1.10.0,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "uvicorn (>=0.29.0,<0.30.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=13.1.0,<14.0.0)"]
+semantic-router = ["semantic-router ; python_version >= \"3.9\""]
 utils = ["numpydoc"]

 [[package]]
@@ -6841,14 +6846,14 @@ sympy = "*"

 [[package]]
 name = "openai"
-version = "1.99.9"
+version = "1.99.8"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.8"
 groups = ["main"]
 files = [
-    {file = "openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a"},
-    {file = "openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92"},
+    {file = "openai-1.99.8-py3-none-any.whl", hash = "sha256:426b981079cffde6dd54868b9b84761ffa291cde77010f051b96433e1835b47d"},
+    {file = "openai-1.99.8.tar.gz", hash = "sha256:4b49845983eb4d5ffae9bae5d98bd5c0bd3a709a30f8b994fc8f316961b6d566"},
 ]

 [package.dependencies]
@@ -7966,6 +7971,7 @@ files = [
     {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
     {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
     {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+    {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
     {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
     {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
     {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
@@ -12470,4 +12476,4 @@ posthog = ["posthog"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<=3.13"
-content-hash = "7682898d3c726a0b1e3d08560ceb26180e1841e89a4a417377ee876b55878a9b"
+content-hash = "7363d5497ee6fe961440d73ef9a1d1eb2410eeb7fd01761d89fc9ac668216261"
pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "cognee"

-version = "0.2.2"
+version = "0.2.3"
 description = "Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning."
 authors = [
     { name = "Vasilije Markovic" },
@@ -20,7 +20,7 @@ classifiers = [
     "Operating System :: Microsoft :: Windows",
 ]
 dependencies = [
-    "openai>=1.80.1,<2",
+    "openai>=1.80.1,<1.99.9",
     "python-dotenv>=1.0.1,<2.0.0",
     "pydantic>=2.10.5,<3.0.0",
     "pydantic-settings>=2.2.1,<3",
@@ -34,7 +34,7 @@ dependencies = [
     "sqlalchemy>=2.0.39,<3.0.0",
     "aiosqlite>=0.20.0,<1.0.0",
     "tiktoken>=0.8.0,<1.0.0",
-    "litellm>=1.57.4, <1.71.0",
+    "litellm>=1.71.0, <2.0.0",
     "instructor>=1.9.1,<2.0.0",
     "langfuse>=2.32.0,<3",
     "filetype>=1.2.0,<2.0.0",
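Taken together, the pyproject changes cap openai below 1.99.9 and move litellm to the 1.71+ series, which is what drives the lock-file churn above: openai resolves to 1.99.8, litellm to 1.75.8, and langchain-openai falls back to 0.3.29, whose openai floor is 1.86.0. A small, illustrative check (not part of the repository) that an installed environment satisfies the new pins, using importlib.metadata and the packaging library:

from importlib.metadata import PackageNotFoundError, version
from packaging.specifiers import SpecifierSet

# Constraints copied from the updated pyproject.toml dependencies.
PINS = {
    "openai": SpecifierSet(">=1.80.1,<1.99.9"),
    "litellm": SpecifierSet(">=1.71.0,<2.0.0"),
}

for name, spec in PINS.items():
    try:
        installed = version(name)
    except PackageNotFoundError:
        print(f"{name}: not installed")
        continue
    # SpecifierSet supports membership tests against version strings.
    status = "ok" if installed in spec else f"violates {spec}"
    print(f"{name} {installed}: {status}")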
uv.lock (generated): 51 changed lines
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.10, <=3.13"
 resolution-markers = [
     "python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
@@ -65,7 +65,6 @@ dependencies = [
     { name = "aiohappyeyeballs" },
     { name = "aiosignal" },
     { name = "async-timeout", version = "4.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
-    { name = "async-timeout", version = "5.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_version < '0'" },
     { name = "attrs" },
     { name = "frozenlist" },
     { name = "multidict" },
@@ -870,7 +869,7 @@ wheels = [

 [[package]]
 name = "cognee"
-version = "0.2.2.dev0"
+version = "0.2.3"
 source = { editable = "." }
 dependencies = [
     { name = "aiofiles" },
@@ -1061,7 +1060,7 @@ requires-dist = [
     { name = "langfuse", specifier = ">=2.32.0,<3" },
     { name = "langsmith", marker = "extra == 'langchain'", specifier = ">=0.2.3,<1.0.0" },
     { name = "limits", specifier = ">=4.4.1,<5" },
-    { name = "litellm", specifier = ">=1.57.4,<1.71.0" },
+    { name = "litellm", specifier = ">=1.71.0,<2.0.0" },
     { name = "llama-index-core", marker = "extra == 'llama-index'", specifier = ">=0.12.11,<0.13" },
     { name = "matplotlib", specifier = ">=3.8.3,<4" },
     { name = "mistral-common", marker = "extra == 'mistral'", specifier = ">=1.5.2,<2" },
@@ -1077,7 +1076,7 @@ requires-dist = [
     { name = "notebook", marker = "extra == 'notebook'", specifier = ">=7.1.0,<8" },
     { name = "numpy", specifier = ">=1.26.4,<=4.0.0" },
     { name = "onnxruntime", specifier = ">=1.0.0,<2.0.0" },
-    { name = "openai", specifier = ">=1.80.1,<2" },
+    { name = "openai", specifier = ">=1.80.1,<1.99.9" },
     { name = "pandas", specifier = ">=2.2.2,<3.0.0" },
     { name = "pgvector", marker = "extra == 'postgres'", specifier = ">=0.3.5,<0.4" },
     { name = "pgvector", marker = "extra == 'postgres-binary'", specifier = ">=0.3.5,<0.4" },
@@ -1862,17 +1861,17 @@ name = "fastembed"
 version = "0.6.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "huggingface-hub" },
-    { name = "loguru" },
-    { name = "mmh3" },
+    { name = "huggingface-hub", marker = "python_full_version < '3.13'" },
+    { name = "loguru", marker = "python_full_version < '3.13'" },
+    { name = "mmh3", marker = "python_full_version < '3.13'" },
     { name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
-    { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
-    { name = "onnxruntime" },
-    { name = "pillow" },
-    { name = "py-rust-stemmers" },
-    { name = "requests" },
-    { name = "tokenizers" },
-    { name = "tqdm" },
+    { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.12.*'" },
+    { name = "onnxruntime", marker = "python_full_version < '3.13'" },
+    { name = "pillow", marker = "python_full_version < '3.13'" },
+    { name = "py-rust-stemmers", marker = "python_full_version < '3.13'" },
+    { name = "requests", marker = "python_full_version < '3.13'" },
+    { name = "tokenizers", marker = "python_full_version < '3.13'" },
+    { name = "tqdm", marker = "python_full_version < '3.13'" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/c6/f4/036a656c605f63dc25f11284f60f69900a54a19c513e1ae60d21d6977e75/fastembed-0.6.0.tar.gz", hash = "sha256:5c9ead25f23449535b07243bbe1f370b820dcc77ec2931e61674e3fe7ff24733", size = 50731, upload-time = "2025-02-26T13:50:33.031Z" }
 wheels = [
@@ -3458,16 +3457,16 @@ wheels = [

 [[package]]
 name = "langchain-openai"
-version = "0.3.30"
+version = "0.3.29"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "langchain-core" },
     { name = "openai" },
     { name = "tiktoken" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/ad/21/6b2024cdd907812d33d31d42c05baa6a3fc6b341d76f7a982730b6985501/langchain_openai-0.3.30.tar.gz", hash = "sha256:90df37509b2dcf5e057f491326fcbf78cf2a71caff5103a5a7de560320171842", size = 766426, upload-time = "2025-08-12T17:05:55.587Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/5b/56/2e2010d15118ac52760f92ebf6ce75b3508e7a1023107ea04233fd6263e0/langchain_openai-0.3.29.tar.gz", hash = "sha256:83a0455f8ce874aa1806131ca3b4db08e482be037b7457a9b3ca21a213d2ab47", size = 766499, upload-time = "2025-08-08T15:12:32.402Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/23/36/cd370071243ae321c22bfafbf75fef1601dd22d0baeeedb71835954ed0ad/langchain_openai-0.3.30-py3-none-any.whl", hash = "sha256:280f1f31004393228e3f75ff8353b1aae86bbc282abc7890a05beb5f43b89923", size = 74362, upload-time = "2025-08-12T17:05:54.415Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/f2/a6a73beec15e90605e6a24c4498a8592d79a72c8e81c18ed0f5e9b7308e9/langchain_openai-0.3.29-py3-none-any.whl", hash = "sha256:71ae6791b3e017ec892a8062f993edc882c6665fd8385aa66e9dc3bff8205996", size = 74316, upload-time = "2025-08-08T15:12:30.794Z" },
 ]

 [[package]]
@@ -3553,7 +3552,7 @@ wheels = [

 [[package]]
 name = "litellm"
-version = "1.70.4"
+version = "1.75.8"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "aiohttp" },
@@ -3568,9 +3567,9 @@ dependencies = [
     { name = "tiktoken" },
     { name = "tokenizers" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/60/d7/d0d76ba896a1e8978550dcc76157d1c50910ba9ade4ef3981a34f01f4fa6/litellm-1.70.4.tar.gz", hash = "sha256:ef6749a091faaaf88313afe4111cdd95736e1e60f21ba894e74f7c5bab2870bd", size = 7813817, upload-time = "2025-05-23T00:05:24.47Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/8d/4e/48e3d6de19afe713223e3bc7009a2003501420de2a5d823c569cefbd9731/litellm-1.75.8.tar.gz", hash = "sha256:92061bd263ff8c33c8fff70ba92cd046adb7ea041a605826a915d108742fe59e", size = 10140384, upload-time = "2025-08-16T21:42:24.23Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cc/8f/0b26ecb08b8282ae0fdfa2223b5df8263579c9e3c75ca96bb7fb7cbc632c/litellm-1.70.4-py3-none-any.whl", hash = "sha256:4d14d04bf5e2bd49336b4abc59193352c731ff371022e4fcf590208f41f644f7", size = 7903749, upload-time = "2025-05-23T00:05:21.017Z" },
+    { url = "https://files.pythonhosted.org/packages/5e/82/c4d00fbeafd93c00dab6ea03f33cadd6a97adeb720ba1d89fc319e5cb10b/litellm-1.75.8-py3-none-any.whl", hash = "sha256:0bf004488df8506381ec6e35e1486e2870e8d578a7c3f2427cd497558ce07a2e", size = 8916305, upload-time = "2025-08-16T21:42:21.387Z" },
 ]

 [[package]]
@@ -3849,8 +3848,8 @@ name = "loguru"
 version = "0.7.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "colorama", marker = "sys_platform == 'win32'" },
-    { name = "win32-setctime", marker = "sys_platform == 'win32'" },
+    { name = "colorama", marker = "python_full_version < '3.13' and sys_platform == 'win32'" },
+    { name = "win32-setctime", marker = "python_full_version < '3.13' and sys_platform == 'win32'" },
 ]
 sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
 wheels = [
@@ -5021,7 +5020,7 @@ wheels = [

 [[package]]
 name = "openai"
-version = "1.99.9"
+version = "1.99.8"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -5033,9 +5032,9 @@ dependencies = [
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/8a/d2/ef89c6f3f36b13b06e271d3cc984ddd2f62508a0972c1cbcc8485a6644ff/openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92", size = 506992, upload-time = "2025-08-12T02:31:10.054Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/4b/81/288157471c43975cc849bc8779b8c7209aec6da5d7cbcd87a982912a19e5/openai-1.99.8.tar.gz", hash = "sha256:4b49845983eb4d5ffae9bae5d98bd5c0bd3a709a30f8b994fc8f316961b6d566", size = 506953, upload-time = "2025-08-11T20:19:02.312Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e8/fb/df274ca10698ee77b07bff952f302ea627cc12dac6b85289485dd77db6de/openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a", size = 786816, upload-time = "2025-08-12T02:31:08.34Z" },
+    { url = "https://files.pythonhosted.org/packages/36/b6/3940f037aa33e6d5aa00707fd02843a1cac06ee0e106f39cfb71d0653d23/openai-1.99.8-py3-none-any.whl", hash = "sha256:426b981079cffde6dd54868b9b84761ffa291cde77010f051b96433e1835b47d", size = 786821, upload-time = "2025-08-11T20:18:59.943Z" },
 ]

 [[package]]