ruff format

This commit is contained in:
vasilije 2025-07-06 18:05:06 +02:00
parent ea035a1bce
commit 53b8df8506
61 changed files with 1619 additions and 610 deletions

View file

@ -2,9 +2,13 @@ from uuid import NAMESPACE_OID, uuid5
from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.vector import get_vector_engine from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
)
from cognee.low_level import DataPoint from cognee.low_level import DataPoint
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
from cognee.modules.engine.models import NodeSet from cognee.modules.engine.models import NodeSet
from cognee.tasks.storage import add_data_points, index_graph_edges from cognee.tasks.storage import add_data_points, index_graph_edges

View file

@ -7,7 +7,9 @@ from cognee.modules.cognify.config import get_cognify_config
from cognee.infrastructure.data.chunking.config import get_chunk_config from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.infrastructure.databases.vector import get_vectordb_config from cognee.infrastructure.databases.vector import get_vectordb_config
from cognee.infrastructure.databases.graph.config import get_graph_config from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from cognee.infrastructure.databases.relational import get_relational_config, get_migration_config from cognee.infrastructure.databases.relational import get_relational_config, get_migration_config
from cognee.infrastructure.files.storage import LocalStorage from cognee.infrastructure.files.storage import LocalStorage

View file

@ -17,7 +17,9 @@ from cognee.api.v1.responses.models import (
) )
from cognee.api.v1.responses.dispatch_function import dispatch_function from cognee.api.v1.responses.dispatch_function import dispatch_function
from cognee.api.v1.responses.default_tools import DEFAULT_TOOLS from cognee.api.v1.responses.default_tools import DEFAULT_TOOLS
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from cognee.modules.users.models import User from cognee.modules.users.models import User
from cognee.modules.users.methods import get_authenticated_user from cognee.modules.users.methods import get_authenticated_user

View file

@ -1,8 +1,13 @@
from typing import Any, Dict, List from typing import Any, Dict, List
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.eval_framework.evaluation.base_eval_adapter import BaseEvalAdapter from cognee.eval_framework.evaluation.base_eval_adapter import BaseEvalAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
from cognee.eval_framework.eval_config import EvalConfig from cognee.eval_framework.eval_config import EvalConfig

View file

@ -5,7 +5,9 @@ import litellm
import os import os
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import TikTokenTokenizer from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import (
TikTokenTokenizer,
)
litellm.set_verbose = False litellm.set_verbose = False
logger = get_logger("FastembedEmbeddingEngine") logger = get_logger("FastembedEmbeddingEngine")

View file

@ -7,10 +7,18 @@ import litellm
import os import os
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.Gemini import GeminiTokenizer from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.Gemini import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import HuggingFaceTokenizer GeminiTokenizer,
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer import MistralTokenizer )
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import TikTokenTokenizer from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import (
HuggingFaceTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer import (
MistralTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import (
TikTokenTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import ( from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import (
embedding_rate_limit_async, embedding_rate_limit_async,
embedding_sleep_and_retry_async, embedding_sleep_and_retry_async,

View file

@ -7,7 +7,9 @@ import os
import aiohttp.http_exceptions import aiohttp.http_exceptions
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import HuggingFaceTokenizer from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import (
HuggingFaceTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import ( from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import (
embedding_rate_limit_async, embedding_rate_limit_async,
embedding_sleep_and_retry_async, embedding_sleep_and_retry_async,

View file

@ -1,5 +1,7 @@
from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from .EmbeddingEngine import EmbeddingEngine from .EmbeddingEngine import EmbeddingEngine
from functools import lru_cache from functools import lru_cache

View file

@ -1,4 +1,12 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import get_max_chunk_tokens get_llm_config,
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import test_llm_connection )
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import test_embedding_connection from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
get_max_chunk_tokens,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
test_llm_connection,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
test_embedding_connection,
)

View file

@ -42,7 +42,6 @@ with EnsureBamlPyImport(__version__) as e:
from .sync_client import b from .sync_client import b
# FOR LEGACY COMPATIBILITY, expose "partial_types" as an alias for "stream_types" # FOR LEGACY COMPATIBILITY, expose "partial_types" as an alias for "stream_types"
# WE RECOMMEND USERS TO USE "stream_types" INSTEAD # WE RECOMMEND USERS TO USE "stream_types" INSTEAD
partial_types = stream_types partial_types = stream_types

View file

@ -36,10 +36,13 @@ class BamlAsyncClient:
self.__llm_response_parser = LlmResponseParser(options) self.__llm_response_parser = LlmResponseParser(options)
self.__llm_stream_parser = LlmStreamParser(options) self.__llm_stream_parser = LlmStreamParser(options)
def with_options(self, def with_options(
self,
tb: typing.Optional[type_builder.TypeBuilder] = None, tb: typing.Optional[type_builder.TypeBuilder] = None,
client_registry: typing.Optional[baml_py.baml_py.ClientRegistry] = None, client_registry: typing.Optional[baml_py.baml_py.ClientRegistry] = None,
collector: typing.Optional[typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]] = None, collector: typing.Optional[
typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]
] = None,
env: typing.Optional[typing.Dict[str, typing.Optional[str]]] = None, env: typing.Optional[typing.Dict[str, typing.Optional[str]]] = None,
) -> "BamlAsyncClient": ) -> "BamlAsyncClient":
options: BamlCallOptions = {} options: BamlCallOptions = {}
@ -73,49 +76,133 @@ class BamlAsyncClient:
def parse_stream(self): def parse_stream(self):
return self.__llm_stream_parser return self.__llm_stream_parser
async def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, async def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraph", args={ result = await self.__options.merge_options(baml_options).call_function_async(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
async def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraphGeneric", args={ result = await self.__options.merge_options(baml_options).call_function_async(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphGeneric",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
async def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraphWithAnthropic", args={ result = await self.__options.merge_options(baml_options).call_function_async(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphWithAnthropic",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
async def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None, "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraphWithEnvPrompt", args={ result = await self.__options.merge_options(baml_options).call_function_async(
"content": content,"prompt_override": prompt_override, function_name="ExtractContentGraphWithEnvPrompt",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
async def SummarizeCode(self, content: str, "prompt_override": prompt_override,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.SummarizedCode: ) -> types.SummarizedCode:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="SummarizeCode", args={ result = await self.__options.merge_options(baml_options).call_function_async(
function_name="SummarizeCode",
args={
"content": content, "content": content,
}) },
return typing.cast(types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__)) )
async def SummarizeContent(self, content: str, return typing.cast(
types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.SummarizedContent: ) -> types.SummarizedContent:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="SummarizeContent", args={ result = await self.__options.merge_options(baml_options).call_function_async(
function_name="SummarizeContent",
args={
"content": content, "content": content,
}) },
return typing.cast(types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__)) )
return typing.cast(
types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__)
)
class BamlStreamClient: class BamlStreamClient:
@ -124,76 +211,179 @@ class BamlStreamClient:
def __init__(self, options: DoNotUseDirectlyCallManager): def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options self.__options = options
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraph", args={ ctx, result = self.__options.merge_options(baml_options).create_async_stream(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}) args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraphGeneric", args={ ctx, result = self.__options.merge_options(baml_options).create_async_stream(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphGeneric",
}) args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraphWithAnthropic", args={ ctx, result = self.__options.merge_options(baml_options).create_async_stream(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphWithAnthropic",
}) args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraphWithEnvPrompt", args={ ctx, result = self.__options.merge_options(baml_options).create_async_stream(
"content": content,"prompt_override": prompt_override, function_name="ExtractContentGraphWithEnvPrompt",
}) args={
"content": content,
"prompt_override": prompt_override,
},
)
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def SummarizeCode(self, content: str,
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode]: ) -> baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="SummarizeCode", args={ ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="SummarizeCode",
args={
"content": content, "content": content,
}) },
)
return baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode]( return baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode](
result, result,
lambda x: typing.cast(stream_types.SummarizedCode, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.SummarizedCode,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def SummarizeContent(self, content: str,
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent]: ) -> baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="SummarizeContent", args={ ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="SummarizeContent",
args={
"content": content, "content": content,
}) },
)
return baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent]( return baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent](
result, result,
lambda x: typing.cast(stream_types.SummarizedContent, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.SummarizedContent,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
@ -204,47 +394,126 @@ class BamlHttpRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager): def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options self.__options = options
async def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, async def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraph", args={ result = await self.__options.merge_options(baml_options).create_http_request_async(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}, mode="request") args={
return result
async def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
return result
async def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
return result
async def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
}, mode="request")
return result
async def SummarizeCode(self, content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeCode", args={
"content": content, "content": content,
}, mode="request") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result return result
async def SummarizeContent(self, content: str,
async def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeContent", args={ result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphGeneric",
args={
"content": content, "content": content,
}, mode="request") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
async def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
async def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
mode="request",
)
return result
async def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeCode",
args={
"content": content,
},
mode="request",
)
return result
async def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeContent",
args={
"content": content,
},
mode="request",
)
return result return result
@ -254,47 +523,126 @@ class BamlHttpStreamRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager): def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options self.__options = options
async def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, async def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraph", args={ result = await self.__options.merge_options(baml_options).create_http_request_async(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}, mode="stream") args={
return result
async def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
return result
async def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
return result
async def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
}, mode="stream")
return result
async def SummarizeCode(self, content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeCode", args={
"content": content, "content": content,
}, mode="stream") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result return result
async def SummarizeContent(self, content: str,
async def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeContent", args={ result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphGeneric",
args={
"content": content, "content": content,
}, mode="stream") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
async def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
async def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
mode="stream",
)
return result
async def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeCode",
args={
"content": content,
},
mode="stream",
)
return result
async def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeContent",
args={
"content": content,
},
mode="stream",
)
return result return result

View file

@ -19,17 +19,19 @@ from .inlinedbaml import get_baml_files
from typing import Dict from typing import Dict
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME = BamlRuntime.from_files( DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME = BamlRuntime.from_files(
"baml_src", "baml_src", get_baml_files(), os.environ.copy()
get_baml_files(),
os.environ.copy()
) )
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX = BamlCtxManager(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME) DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX = BamlCtxManager(
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME
)
def reset_baml_env_vars(env_vars: Dict[str, str]): def reset_baml_env_vars(env_vars: Dict[str, str]):
warnings.warn( warnings.warn(
"reset_baml_env_vars is deprecated and should be removed. Environment variables are now lazily loaded on each function call", "reset_baml_env_vars is deprecated and should be removed. Environment variables are now lazily loaded on each function call",
DeprecationWarning, DeprecationWarning,
stacklevel=2 stacklevel=2,
) )
__all__ = [] __all__ = []

File diff suppressed because one or more lines are too long

View file

@ -16,6 +16,7 @@ import typing_extensions
from . import stream_types, types from . import stream_types, types
from .runtime import DoNotUseDirectlyCallManager, BamlCallOptions from .runtime import DoNotUseDirectlyCallManager, BamlCallOptions
class LlmResponseParser: class LlmResponseParser:
__options: DoNotUseDirectlyCallManager __options: DoNotUseDirectlyCallManager
@ -23,43 +24,70 @@ class LlmResponseParser:
self.__options = options self.__options = options
def ExtractContentGraph( def ExtractContentGraph(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraph", llm_response=llm_response, mode="request") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraph", llm_response=llm_response, mode="request"
)
return typing.cast(types.KnowledgeGraph, result) return typing.cast(types.KnowledgeGraph, result)
def ExtractContentGraphGeneric( def ExtractContentGraphGeneric(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="request") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="request"
)
return typing.cast(types.KnowledgeGraph, result) return typing.cast(types.KnowledgeGraph, result)
def ExtractContentGraphWithAnthropic( def ExtractContentGraphWithAnthropic(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithAnthropic", llm_response=llm_response, mode="request") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithAnthropic",
llm_response=llm_response,
mode="request",
)
return typing.cast(types.KnowledgeGraph, result) return typing.cast(types.KnowledgeGraph, result)
def ExtractContentGraphWithEnvPrompt( def ExtractContentGraphWithEnvPrompt(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithEnvPrompt", llm_response=llm_response, mode="request") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithEnvPrompt",
llm_response=llm_response,
mode="request",
)
return typing.cast(types.KnowledgeGraph, result) return typing.cast(types.KnowledgeGraph, result)
def SummarizeCode( def SummarizeCode(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedCode: ) -> types.SummarizedCode:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeCode", llm_response=llm_response, mode="request") result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeCode", llm_response=llm_response, mode="request"
)
return typing.cast(types.SummarizedCode, result) return typing.cast(types.SummarizedCode, result)
def SummarizeContent( def SummarizeContent(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedContent: ) -> types.SummarizedContent:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeContent", llm_response=llm_response, mode="request") result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeContent", llm_response=llm_response, mode="request"
)
return typing.cast(types.SummarizedContent, result) return typing.cast(types.SummarizedContent, result)
class LlmStreamParser: class LlmStreamParser:
__options: DoNotUseDirectlyCallManager __options: DoNotUseDirectlyCallManager
@ -67,39 +95,65 @@ class LlmStreamParser:
self.__options = options self.__options = options
def ExtractContentGraph( def ExtractContentGraph(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph: ) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraph", llm_response=llm_response, mode="stream") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraph", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.KnowledgeGraph, result) return typing.cast(stream_types.KnowledgeGraph, result)
def ExtractContentGraphGeneric( def ExtractContentGraphGeneric(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph: ) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="stream") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.KnowledgeGraph, result) return typing.cast(stream_types.KnowledgeGraph, result)
def ExtractContentGraphWithAnthropic( def ExtractContentGraphWithAnthropic(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph: ) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithAnthropic", llm_response=llm_response, mode="stream") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithAnthropic",
llm_response=llm_response,
mode="stream",
)
return typing.cast(stream_types.KnowledgeGraph, result) return typing.cast(stream_types.KnowledgeGraph, result)
def ExtractContentGraphWithEnvPrompt( def ExtractContentGraphWithEnvPrompt(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph: ) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithEnvPrompt", llm_response=llm_response, mode="stream") result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithEnvPrompt",
llm_response=llm_response,
mode="stream",
)
return typing.cast(stream_types.KnowledgeGraph, result) return typing.cast(stream_types.KnowledgeGraph, result)
def SummarizeCode( def SummarizeCode(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.SummarizedCode: ) -> stream_types.SummarizedCode:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeCode", llm_response=llm_response, mode="stream") result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeCode", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.SummarizedCode, result) return typing.cast(stream_types.SummarizedCode, result)
def SummarizeContent( def SummarizeContent(
self, llm_response: str, baml_options: BamlCallOptions = {}, self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.SummarizedContent: ) -> stream_types.SummarizedContent:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeContent", llm_response=llm_response, mode="stream") result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeContent", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.SummarizedContent, result) return typing.cast(stream_types.SummarizedContent, result)

View file

@ -17,7 +17,10 @@ import typing_extensions
import baml_py import baml_py
from . import types, stream_types, type_builder from . import types, stream_types, type_builder
from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__, DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX as __ctx__manager__ from .globals import (
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__,
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX as __ctx__manager__,
)
class BamlCallOptions(typing.TypedDict, total=False): class BamlCallOptions(typing.TypedDict, total=False):
@ -48,9 +51,6 @@ class _ResolvedBamlOptions:
self.env_vars = env_vars self.env_vars = env_vars
class DoNotUseDirectlyCallManager: class DoNotUseDirectlyCallManager:
def __init__(self, baml_options: BamlCallOptions): def __init__(self, baml_options: BamlCallOptions):
self.__baml_options = baml_options self.__baml_options = baml_options
@ -74,7 +74,9 @@ class DoNotUseDirectlyCallManager:
collectors_as_list = ( collectors_as_list = (
collector collector
if isinstance(collector, list) if isinstance(collector, list)
else [collector] if collector is not None else [] else [collector]
if collector is not None
else []
) )
env_vars = os.environ.copy() env_vars = os.environ.copy()
for k, v in self.__baml_options.get("env", {}).items(): for k, v in self.__baml_options.get("env", {}).items():
@ -164,7 +166,9 @@ class DoNotUseDirectlyCallManager:
*, *,
function_name: str, function_name: str,
args: typing.Dict[str, typing.Any], args: typing.Dict[str, typing.Any],
) -> typing.Tuple[baml_py.baml_py.RuntimeContextManager, baml_py.baml_py.SyncFunctionResultStream]: ) -> typing.Tuple[
baml_py.baml_py.RuntimeContextManager, baml_py.baml_py.SyncFunctionResultStream
]:
resolved_options = self.__resolve() resolved_options = self.__resolve()
ctx = __ctx__manager__.get() ctx = __ctx__manager__.get()
result = __runtime__.stream_function_sync( result = __runtime__.stream_function_sync(
@ -232,7 +236,13 @@ class DoNotUseDirectlyCallManager:
mode == "stream", mode == "stream",
) )
def parse_response(self, *, function_name: str, llm_response: str, mode: typing_extensions.Literal["stream", "request"]) -> typing.Any: def parse_response(
self,
*,
function_name: str,
llm_response: str,
mode: typing_extensions.Literal["stream", "request"],
) -> typing.Any:
resolved_options = self.__resolve() resolved_options = self.__resolve()
return __runtime__.parse_llm_response( return __runtime__.parse_llm_response(
function_name, function_name,

View file

@ -18,14 +18,19 @@ import baml_py
from . import types from . import types
StreamStateValueT = typing.TypeVar('StreamStateValueT') StreamStateValueT = typing.TypeVar("StreamStateValueT")
class StreamState(BaseModel, typing.Generic[StreamStateValueT]): class StreamState(BaseModel, typing.Generic[StreamStateValueT]):
value: StreamStateValueT value: StreamStateValueT
state: typing_extensions.Literal["Pending", "Incomplete", "Complete"] state: typing_extensions.Literal["Pending", "Incomplete", "Complete"]
# ######################################################################### # #########################################################################
# Generated classes (7) # Generated classes (7)
# ######################################################################### # #########################################################################
class Edge(BaseModel): class Edge(BaseModel):
# doc string for edge # doc string for edge
# doc string for source_node_id # doc string for source_node_id
@ -34,23 +39,27 @@ class Edge(BaseModel):
target_node_id: typing.Optional[str] = None target_node_id: typing.Optional[str] = None
relationship_name: typing.Optional[str] = None relationship_name: typing.Optional[str] = None
class KnowledgeGraph(BaseModel): class KnowledgeGraph(BaseModel):
nodes: typing.List["types.Node"] nodes: typing.List["types.Node"]
edges: typing.List["Edge"] edges: typing.List["Edge"]
class Node(BaseModel): class Node(BaseModel):
model_config = ConfigDict(extra='allow') model_config = ConfigDict(extra="allow")
id: typing.Optional[str] = None id: typing.Optional[str] = None
name: typing.Optional[str] = None name: typing.Optional[str] = None
type: typing.Optional[str] = None type: typing.Optional[str] = None
description: typing.Optional[str] = None description: typing.Optional[str] = None
class SummarizedClass(BaseModel): class SummarizedClass(BaseModel):
name: typing.Optional[str] = None name: typing.Optional[str] = None
description: typing.Optional[str] = None description: typing.Optional[str] = None
methods: typing.Optional[typing.List["SummarizedFunction"]] = None methods: typing.Optional[typing.List["SummarizedFunction"]] = None
decorators: typing.Optional[typing.List[str]] = None decorators: typing.Optional[typing.List[str]] = None
class SummarizedCode(BaseModel): class SummarizedCode(BaseModel):
high_level_summary: typing.Optional[str] = None high_level_summary: typing.Optional[str] = None
key_features: typing.List[str] key_features: typing.List[str]
@ -60,10 +69,12 @@ class SummarizedCode(BaseModel):
functions: typing.List["SummarizedFunction"] functions: typing.List["SummarizedFunction"]
workflow_description: typing.Optional[str] = None workflow_description: typing.Optional[str] = None
class SummarizedContent(BaseModel): class SummarizedContent(BaseModel):
summary: typing.Optional[str] = None summary: typing.Optional[str] = None
description: typing.Optional[str] = None description: typing.Optional[str] = None
class SummarizedFunction(BaseModel): class SummarizedFunction(BaseModel):
name: typing.Optional[str] = None name: typing.Optional[str] = None
description: typing.Optional[str] = None description: typing.Optional[str] = None
@ -71,6 +82,7 @@ class SummarizedFunction(BaseModel):
outputs: typing.Optional[typing.List[str]] = None outputs: typing.Optional[typing.List[str]] = None
decorators: typing.Optional[typing.List[str]] = None decorators: typing.Optional[typing.List[str]] = None
# ######################################################################### # #########################################################################
# Generated type aliases (0) # Generated type aliases (0)
# ######################################################################### # #########################################################################

View file

@ -19,6 +19,7 @@ from .parser import LlmResponseParser, LlmStreamParser
from .runtime import DoNotUseDirectlyCallManager, BamlCallOptions from .runtime import DoNotUseDirectlyCallManager, BamlCallOptions
from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__ from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__
class BamlSyncClient: class BamlSyncClient:
__options: DoNotUseDirectlyCallManager __options: DoNotUseDirectlyCallManager
__stream_client: "BamlStreamClient" __stream_client: "BamlStreamClient"
@ -48,10 +49,13 @@ class BamlSyncClient:
self.__llm_response_parser = LlmResponseParser(self.__options) self.__llm_response_parser = LlmResponseParser(self.__options)
self.__llm_stream_parser = LlmStreamParser(self.__options) self.__llm_stream_parser = LlmStreamParser(self.__options)
def with_options(self, def with_options(
self,
tb: typing.Optional[type_builder.TypeBuilder] = None, tb: typing.Optional[type_builder.TypeBuilder] = None,
client_registry: typing.Optional[baml_py.baml_py.ClientRegistry] = None, client_registry: typing.Optional[baml_py.baml_py.ClientRegistry] = None,
collector: typing.Optional[typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]] = None, collector: typing.Optional[
typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]
] = None,
env: typing.Optional[typing.Dict[str, typing.Optional[str]]] = None, env: typing.Optional[typing.Dict[str, typing.Optional[str]]] = None,
) -> "BamlSyncClient": ) -> "BamlSyncClient":
options: BamlCallOptions = {} options: BamlCallOptions = {}
@ -85,49 +89,133 @@ class BamlSyncClient:
def parse_stream(self): def parse_stream(self):
return self.__llm_stream_parser return self.__llm_stream_parser
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraph", args={ result = self.__options.merge_options(baml_options).call_function_sync(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraphGeneric", args={ result = self.__options.merge_options(baml_options).call_function_sync(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphGeneric",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraphWithAnthropic", args={ result = self.__options.merge_options(baml_options).call_function_sync(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphWithAnthropic",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None, "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph: ) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraphWithEnvPrompt", args={ result = self.__options.merge_options(baml_options).call_function_sync(
"content": content,"prompt_override": prompt_override, function_name="ExtractContentGraphWithEnvPrompt",
}) args={
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)) "content": content,
def SummarizeCode(self, content: str, "prompt_override": prompt_override,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.SummarizedCode: ) -> types.SummarizedCode:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="SummarizeCode", args={ result = self.__options.merge_options(baml_options).call_function_sync(
function_name="SummarizeCode",
args={
"content": content, "content": content,
}) },
return typing.cast(types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__)) )
def SummarizeContent(self, content: str, return typing.cast(
types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__)
)
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> types.SummarizedContent: ) -> types.SummarizedContent:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="SummarizeContent", args={ result = self.__options.merge_options(baml_options).call_function_sync(
function_name="SummarizeContent",
args={
"content": content, "content": content,
}) },
return typing.cast(types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__)) )
return typing.cast(
types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__)
)
class BamlStreamClient: class BamlStreamClient:
@ -136,76 +224,179 @@ class BamlStreamClient:
def __init__(self, options: DoNotUseDirectlyCallManager): def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options self.__options = options
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraph", args={ ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}) args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraphGeneric", args={ ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphGeneric",
}) args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraphWithAnthropic", args={ ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraphWithAnthropic",
}) args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]: ) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraphWithEnvPrompt", args={ ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
"content": content,"prompt_override": prompt_override, function_name="ExtractContentGraphWithEnvPrompt",
}) args={
"content": content,
"prompt_override": prompt_override,
},
)
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]( return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result, result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def SummarizeCode(self, content: str,
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.SummarizedCode, types.SummarizedCode]: ) -> baml_py.BamlSyncStream[stream_types.SummarizedCode, types.SummarizedCode]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="SummarizeCode", args={ ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
function_name="SummarizeCode",
args={
"content": content, "content": content,
}) },
)
return baml_py.BamlSyncStream[stream_types.SummarizedCode, types.SummarizedCode]( return baml_py.BamlSyncStream[stream_types.SummarizedCode, types.SummarizedCode](
result, result,
lambda x: typing.cast(stream_types.SummarizedCode, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.SummarizedCode,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
def SummarizeContent(self, content: str,
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.SummarizedContent, types.SummarizedContent]: ) -> baml_py.BamlSyncStream[stream_types.SummarizedContent, types.SummarizedContent]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="SummarizeContent", args={ ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
function_name="SummarizeContent",
args={
"content": content, "content": content,
}) },
)
return baml_py.BamlSyncStream[stream_types.SummarizedContent, types.SummarizedContent]( return baml_py.BamlSyncStream[stream_types.SummarizedContent, types.SummarizedContent](
result, result,
lambda x: typing.cast(stream_types.SummarizedContent, x.cast_to(types, types, stream_types, True, __runtime__)), lambda x: typing.cast(
lambda x: typing.cast(types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)), stream_types.SummarizedContent,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx, ctx,
) )
@ -216,47 +407,126 @@ class BamlHttpRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager): def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options self.__options = options
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraph", args={ result = self.__options.merge_options(baml_options).create_http_request_sync(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}, mode="request") args={
return result
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
return result
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
return result
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
}, mode="request")
return result
def SummarizeCode(self, content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="SummarizeCode", args={
"content": content, "content": content,
}, mode="request") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result return result
def SummarizeContent(self, content: str,
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="SummarizeContent", args={ result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="ExtractContentGraphGeneric",
args={
"content": content, "content": content,
}, mode="request") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
mode="request",
)
return result
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="SummarizeCode",
args={
"content": content,
},
mode="request",
)
return result
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="SummarizeContent",
args={
"content": content,
},
mode="request",
)
return result return result
@ -266,47 +536,126 @@ class BamlHttpStreamRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager): def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options self.__options = options
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None, def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraph", args={ result = self.__options.merge_options(baml_options).create_http_request_sync(
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content, function_name="ExtractContentGraph",
}, mode="stream") args={
return result
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
return result
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
return result
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
}, mode="stream")
return result
def SummarizeCode(self, content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="SummarizeCode", args={
"content": content, "content": content,
}, mode="stream") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result return result
def SummarizeContent(self, content: str,
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {}, baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest: ) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(function_name="SummarizeContent", args={ result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="ExtractContentGraphGeneric",
args={
"content": content, "content": content,
}, mode="stream") "mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
mode="stream",
)
return result
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="SummarizeCode",
args={
"content": content,
},
mode="stream",
)
return result
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = self.__options.merge_options(baml_options).create_http_request_sync(
function_name="SummarizeContent",
args={
"content": content,
},
mode="stream",
)
return result return result

View file

@ -14,9 +14,13 @@ from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX
trace = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.trace_fn trace = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.trace_fn
set_tags = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.upsert_tags set_tags = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.upsert_tags
def flush(): def flush():
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.flush() DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.flush()
on_log_event = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.on_log_event on_log_event = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.on_log_event
__all__ = ['trace', 'set_tags', "flush", "on_log_event"] __all__ = ["trace", "set_tags", "flush", "on_log_event"]

View file

@ -15,19 +15,29 @@ from baml_py import type_builder
from baml_py import baml_py from baml_py import baml_py
from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME
class TypeBuilder(type_builder.TypeBuilder): class TypeBuilder(type_builder.TypeBuilder):
def __init__(self): def __init__(self):
super().__init__(classes=set( super().__init__(
["Edge","KnowledgeGraph","Node","SummarizedClass","SummarizedCode","SummarizedContent","SummarizedFunction",] classes=set(
), enums=set( [
[] "Edge",
), runtime=DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME) "KnowledgeGraph",
"Node",
"SummarizedClass",
"SummarizedCode",
"SummarizedContent",
"SummarizedFunction",
]
),
enums=set([]),
runtime=DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME,
)
# ######################################################################### # #########################################################################
# Generated enums 0 # Generated enums 0
# ######################################################################### # #########################################################################
# ######################################################################### # #########################################################################
# Generated classes 7 # Generated classes 7
# ######################################################################### # #########################################################################
@ -61,7 +71,6 @@ class TypeBuilder(type_builder.TypeBuilder):
return SummarizedFunctionViewer(self) return SummarizedFunctionViewer(self)
# ######################################################################### # #########################################################################
# Generated enums 0 # Generated enums 0
# ######################################################################### # #########################################################################
@ -71,11 +80,18 @@ class TypeBuilder(type_builder.TypeBuilder):
# Generated classes 7 # Generated classes 7
# ######################################################################### # #########################################################################
class EdgeAst: class EdgeAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("Edge") self._bldr = _tb.class_("Edge")
self._properties: typing.Set[str] = set([ "source_node_id", "target_node_id", "relationship_name", ]) self._properties: typing.Set[str] = set(
[
"source_node_id",
"target_node_id",
"relationship_name",
]
)
self._props = EdgeProperties(self._bldr, self._properties) self._props = EdgeProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -90,10 +106,11 @@ class EdgeViewer(EdgeAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]: def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties] return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class EdgeProperties: class EdgeProperties:
@ -101,8 +118,6 @@ class EdgeProperties:
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
@property @property
def source_node_id(self) -> type_builder.ClassPropertyViewer: def source_node_id(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("source_node_id")) return type_builder.ClassPropertyViewer(self.__bldr.property("source_node_id"))
@ -116,13 +131,16 @@ class EdgeProperties:
return type_builder.ClassPropertyViewer(self.__bldr.property("relationship_name")) return type_builder.ClassPropertyViewer(self.__bldr.property("relationship_name"))
class KnowledgeGraphAst: class KnowledgeGraphAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("KnowledgeGraph") self._bldr = _tb.class_("KnowledgeGraph")
self._properties: typing.Set[str] = set([ "nodes", "edges", ]) self._properties: typing.Set[str] = set(
[
"nodes",
"edges",
]
)
self._props = KnowledgeGraphProperties(self._bldr, self._properties) self._props = KnowledgeGraphProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -137,10 +155,11 @@ class KnowledgeGraphViewer(KnowledgeGraphAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]: def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties] return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class KnowledgeGraphProperties: class KnowledgeGraphProperties:
@ -148,8 +167,6 @@ class KnowledgeGraphProperties:
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
@property @property
def nodes(self) -> type_builder.ClassPropertyViewer: def nodes(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("nodes")) return type_builder.ClassPropertyViewer(self.__bldr.property("nodes"))
@ -159,13 +176,18 @@ class KnowledgeGraphProperties:
return type_builder.ClassPropertyViewer(self.__bldr.property("edges")) return type_builder.ClassPropertyViewer(self.__bldr.property("edges"))
class NodeAst: class NodeAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("Node") self._bldr = _tb.class_("Node")
self._properties: typing.Set[str] = set([ "id", "name", "type", "description", ]) self._properties: typing.Set[str] = set(
[
"id",
"name",
"type",
"description",
]
)
self._props = NodeProperties(self._bldr, self._properties) self._props = NodeProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -180,7 +202,6 @@ class NodeBuilder(NodeAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def add_property(self, name: str, type: baml_py.FieldType) -> baml_py.ClassPropertyBuilder: def add_property(self, name: str, type: baml_py.FieldType) -> baml_py.ClassPropertyBuilder:
if name in self._properties: if name in self._properties:
raise ValueError(f"Property {name} already exists.") raise ValueError(f"Property {name} already exists.")
@ -190,20 +211,16 @@ class NodeBuilder(NodeAst):
return [(name, self._bldr.property(name)) for name in self._properties] return [(name, self._bldr.property(name)) for name in self._properties]
class NodeProperties: class NodeProperties:
def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]): def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
def __getattr__(self, name: str) -> baml_py.ClassPropertyBuilder: def __getattr__(self, name: str) -> baml_py.ClassPropertyBuilder:
if name not in self.__properties: if name not in self.__properties:
raise AttributeError(f"Property {name} not found.") raise AttributeError(f"Property {name} not found.")
return self.__bldr.property(name) return self.__bldr.property(name)
@property @property
def id(self) -> baml_py.ClassPropertyBuilder: def id(self) -> baml_py.ClassPropertyBuilder:
return self.__bldr.property("id") return self.__bldr.property("id")
@ -221,13 +238,18 @@ class NodeProperties:
return self.__bldr.property("description") return self.__bldr.property("description")
class SummarizedClassAst: class SummarizedClassAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedClass") self._bldr = _tb.class_("SummarizedClass")
self._properties: typing.Set[str] = set([ "name", "description", "methods", "decorators", ]) self._properties: typing.Set[str] = set(
[
"name",
"description",
"methods",
"decorators",
]
)
self._props = SummarizedClassProperties(self._bldr, self._properties) self._props = SummarizedClassProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -242,10 +264,11 @@ class SummarizedClassViewer(SummarizedClassAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]: def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties] return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedClassProperties: class SummarizedClassProperties:
@ -253,8 +276,6 @@ class SummarizedClassProperties:
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
@property @property
def name(self) -> type_builder.ClassPropertyViewer: def name(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("name")) return type_builder.ClassPropertyViewer(self.__bldr.property("name"))
@ -272,13 +293,21 @@ class SummarizedClassProperties:
return type_builder.ClassPropertyViewer(self.__bldr.property("decorators")) return type_builder.ClassPropertyViewer(self.__bldr.property("decorators"))
class SummarizedCodeAst: class SummarizedCodeAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedCode") self._bldr = _tb.class_("SummarizedCode")
self._properties: typing.Set[str] = set([ "high_level_summary", "key_features", "imports", "constants", "classes", "functions", "workflow_description", ]) self._properties: typing.Set[str] = set(
[
"high_level_summary",
"key_features",
"imports",
"constants",
"classes",
"functions",
"workflow_description",
]
)
self._props = SummarizedCodeProperties(self._bldr, self._properties) self._props = SummarizedCodeProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -293,10 +322,11 @@ class SummarizedCodeViewer(SummarizedCodeAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]: def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties] return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedCodeProperties: class SummarizedCodeProperties:
@ -304,8 +334,6 @@ class SummarizedCodeProperties:
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
@property @property
def high_level_summary(self) -> type_builder.ClassPropertyViewer: def high_level_summary(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("high_level_summary")) return type_builder.ClassPropertyViewer(self.__bldr.property("high_level_summary"))
@ -335,13 +363,16 @@ class SummarizedCodeProperties:
return type_builder.ClassPropertyViewer(self.__bldr.property("workflow_description")) return type_builder.ClassPropertyViewer(self.__bldr.property("workflow_description"))
class SummarizedContentAst: class SummarizedContentAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedContent") self._bldr = _tb.class_("SummarizedContent")
self._properties: typing.Set[str] = set([ "summary", "description", ]) self._properties: typing.Set[str] = set(
[
"summary",
"description",
]
)
self._props = SummarizedContentProperties(self._bldr, self._properties) self._props = SummarizedContentProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -356,10 +387,11 @@ class SummarizedContentViewer(SummarizedContentAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]: def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties] return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedContentProperties: class SummarizedContentProperties:
@ -367,8 +399,6 @@ class SummarizedContentProperties:
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
@property @property
def summary(self) -> type_builder.ClassPropertyViewer: def summary(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("summary")) return type_builder.ClassPropertyViewer(self.__bldr.property("summary"))
@ -378,13 +408,19 @@ class SummarizedContentProperties:
return type_builder.ClassPropertyViewer(self.__bldr.property("description")) return type_builder.ClassPropertyViewer(self.__bldr.property("description"))
class SummarizedFunctionAst: class SummarizedFunctionAst:
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute) _tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedFunction") self._bldr = _tb.class_("SummarizedFunction")
self._properties: typing.Set[str] = set([ "name", "description", "inputs", "outputs", "decorators", ]) self._properties: typing.Set[str] = set(
[
"name",
"description",
"inputs",
"outputs",
"decorators",
]
)
self._props = SummarizedFunctionProperties(self._bldr, self._properties) self._props = SummarizedFunctionProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType: def type(self) -> baml_py.FieldType:
@ -399,10 +435,11 @@ class SummarizedFunctionViewer(SummarizedFunctionAst):
def __init__(self, tb: type_builder.TypeBuilder): def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb) super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]: def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties] return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedFunctionProperties: class SummarizedFunctionProperties:
@ -410,8 +447,6 @@ class SummarizedFunctionProperties:
self.__bldr = bldr self.__bldr = bldr
self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821 self.__properties = properties # type: ignore (we know how to use this private attribute) # noqa: F821
@property @property
def name(self) -> type_builder.ClassPropertyViewer: def name(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("name")) return type_builder.ClassPropertyViewer(self.__bldr.property("name"))
@ -431,6 +466,3 @@ class SummarizedFunctionProperties:
@property @property
def decorators(self) -> type_builder.ClassPropertyViewer: def decorators(self) -> type_builder.ClassPropertyViewer:
return type_builder.ClassPropertyViewer(self.__bldr.property("decorators")) return type_builder.ClassPropertyViewer(self.__bldr.property("decorators"))

View file

@ -15,27 +15,18 @@ from . import stream_types
# Maps fully-qualified model names (as emitted by the BAML runtime) to the
# corresponding Pydantic classes, for both the final (`types`) and streaming
# (`stream_types`) variants, so deserializers can resolve a type by its string name.
type_map = {
    "types.Edge": types.Edge,
    "stream_types.Edge": stream_types.Edge,
    "types.KnowledgeGraph": types.KnowledgeGraph,
    "stream_types.KnowledgeGraph": stream_types.KnowledgeGraph,
    "types.Node": types.Node,
    "stream_types.Node": stream_types.Node,
    "types.SummarizedClass": types.SummarizedClass,
    "stream_types.SummarizedClass": stream_types.SummarizedClass,
    "types.SummarizedCode": types.SummarizedCode,
    "stream_types.SummarizedCode": stream_types.SummarizedCode,
    "types.SummarizedContent": types.SummarizedContent,
    "stream_types.SummarizedContent": stream_types.SummarizedContent,
    "types.SummarizedFunction": types.SummarizedFunction,
    "stream_types.SummarizedFunction": stream_types.SummarizedFunction,
}

View file

@ -20,22 +20,29 @@ from pydantic import BaseModel, ConfigDict
import baml_py import baml_py
# Type variables for the generic Checked container: the wrapped value type
# and the (string-bound) check-name key type.
CheckT = typing_extensions.TypeVar("CheckT")
CheckName = typing_extensions.TypeVar("CheckName", bound=str)


class Check(BaseModel):
    """Result of a single named check expression evaluated by the runtime."""

    name: str
    expression: str
    status: str


class Checked(BaseModel, typing.Generic[CheckT, CheckName]):
    """A value of type ``CheckT`` together with its checks, keyed by name."""

    value: CheckT
    checks: typing.Dict[CheckName, Check]


def get_checks(checks: typing.Dict[CheckName, Check]) -> typing.List[Check]:
    """Return all checks from *checks* as a list (insertion order preserved)."""
    return list(checks.values())


def all_succeeded(checks: typing.Dict[CheckName, Check]) -> bool:
    """Return True when every check in *checks* has status ``"succeeded"``."""
    return all(check.status == "succeeded" for check in get_checks(checks))
# ######################################################################### # #########################################################################
# Generated enums (0) # Generated enums (0)
# ######################################################################### # #########################################################################
@ -44,6 +51,7 @@ def all_succeeded(checks: typing.Dict[CheckName, Check]) -> bool:
# Generated classes (7) # Generated classes (7)
# ######################################################################### # #########################################################################
class Edge(BaseModel): class Edge(BaseModel):
# doc string for edge # doc string for edge
# doc string for source_node_id # doc string for source_node_id
@ -52,23 +60,27 @@ class Edge(BaseModel):
target_node_id: str target_node_id: str
relationship_name: str relationship_name: str
class KnowledgeGraph(BaseModel):
    """A graph made of nodes and the edges that connect them."""

    nodes: typing.List["Node"]
    edges: typing.List["Edge"]
class Node(BaseModel):
    """A single graph node with its identifying and descriptive fields."""

    # extra="allow" so properties added dynamically via the type builder
    # are accepted instead of rejected by Pydantic validation.
    model_config = ConfigDict(extra="allow")

    id: str
    name: str
    type: str
    description: str
class SummarizedClass(BaseModel):
    """Summary of a single class extracted from source code."""

    name: str
    description: str
    methods: typing.Optional[typing.List["SummarizedFunction"]] = None
    decorators: typing.Optional[typing.List[str]] = None
class SummarizedCode(BaseModel): class SummarizedCode(BaseModel):
high_level_summary: str high_level_summary: str
key_features: typing.List[str] key_features: typing.List[str]
@ -78,10 +90,12 @@ class SummarizedCode(BaseModel):
functions: typing.List["SummarizedFunction"] functions: typing.List["SummarizedFunction"]
workflow_description: typing.Optional[str] = None workflow_description: typing.Optional[str] = None
class SummarizedContent(BaseModel):
    """A short summary plus a longer description of a piece of content."""

    summary: str
    description: str
class SummarizedFunction(BaseModel): class SummarizedFunction(BaseModel):
name: str name: str
description: str description: str
@ -89,6 +103,7 @@ class SummarizedFunction(BaseModel):
outputs: typing.Optional[typing.List[str]] = None outputs: typing.Optional[typing.List[str]] = None
decorators: typing.Optional[typing.List[str]] = None decorators: typing.Optional[typing.List[str]] = None
# ######################################################################### # #########################################################################
# Generated type aliases (0) # Generated type aliases (0)
# ######################################################################### # #########################################################################

View file

@ -5,6 +5,7 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import model_validator from pydantic import model_validator
from baml_py import ClientRegistry from baml_py import ClientRegistry
class LLMConfig(BaseSettings): class LLMConfig(BaseSettings):
""" """
Configuration settings for the LLM (Large Language Model) provider and related options. Configuration settings for the LLM (Large Language Model) provider and related options.
@ -54,11 +55,15 @@ class LLMConfig(BaseSettings):
def model_post_init(self, __context) -> None: def model_post_init(self, __context) -> None:
"""Initialize the BAML registry after the model is created.""" """Initialize the BAML registry after the model is created."""
self.baml_registry.add_llm_client(name=self.llm_provider, provider=self.llm_provider, options={ self.baml_registry.add_llm_client(
name=self.llm_provider,
provider=self.llm_provider,
options={
"model": self.llm_model, "model": self.llm_model,
"temperature": self.llm_temperature, "temperature": self.llm_temperature,
"api_key": self.llm_api_key "api_key": self.llm_api_key,
}) },
)
# Sets the primary client # Sets the primary client
self.baml_registry.set_primary(self.llm_provider) self.baml_registry.set_primary(self.llm_provider)

View file

@ -61,7 +61,9 @@ async def extract_code_summary(content: str):
config = get_llm_config() config = get_llm_config()
result = await b.SummarizeCode(content, baml_options={"tb": config.baml_registry}) result = await b.SummarizeCode(content, baml_options={"tb": config.baml_registry})
except Exception as e: except Exception as e:
logger.error("Failed to extract code summary with BAML, falling back to mock summary", exc_info=e) logger.error(
"Failed to extract code summary with BAML, falling back to mock summary", exc_info=e
)
result = get_mock_summarized_code() result = get_mock_summarized_code()
return result return result

View file

@ -2,14 +2,14 @@ import os
from typing import Type from typing import Type
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.async_client import b from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.async_client import b
from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.type_builder import TypeBuilder from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.type_builder import (
TypeBuilder,
)
from cognee.infrastructure.llm.structured_output_framework.baml_src.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.baml_src.config import get_llm_config
from cognee.shared.logging_utils import get_logger, setup_logging from cognee.shared.logging_utils import get_logger, setup_logging
async def extract_content_graph(content: str, response_model: Type[BaseModel]): async def extract_content_graph(content: str, response_model: Type[BaseModel]):
# tb = TypeBuilder() # tb = TypeBuilder()
config = get_llm_config() config = get_llm_config()
@ -19,9 +19,8 @@ async def extract_content_graph(content: str, response_model: Type[BaseModel]):
# ([tb.literal_string("USA"), tb.literal_string("UK"), tb.literal_string("Germany"), tb.literal_string("other")]) # ([tb.literal_string("USA"), tb.literal_string("UK"), tb.literal_string("Germany"), tb.literal_string("other")])
# tb.Node.add_property("country", country) # tb.Node.add_property("country", country)
graph = await b.ExtractContentGraph(
content, mode="simple", baml_options={"tb": config.baml_registry}
graph = await b.ExtractContentGraph(content, mode="simple", baml_options={ "tb": config.baml_registry}) )
return graph return graph

View file

@ -1,7 +1,11 @@
from typing import Type from typing import Type
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
async def extract_categories(content: str, response_model: Type[BaseModel]): async def extract_categories(content: str, response_model: Type[BaseModel]):

View file

@ -5,8 +5,12 @@ from typing import Type
from instructor.exceptions import InstructorRetryException from instructor.exceptions import InstructorRetryException
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.shared.data_models import SummarizedCode from cognee.shared.data_models import SummarizedCode
from cognee.tasks.summarization.mock_summary import get_mock_summarized_code from cognee.tasks.summarization.mock_summary import get_mock_summarized_code

View file

@ -1,9 +1,15 @@
import os import os
from typing import Type from typing import Type
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt get_llm_client,
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config )
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
async def extract_content_graph(content: str, response_model: Type[BaseModel]): async def extract_content_graph(content: str, response_model: Type[BaseModel]):

View file

@ -3,9 +3,16 @@ from pydantic import BaseModel
import instructor import instructor
from cognee.exceptions import InvalidValueError from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt LLMInterface,
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import rate_limit_async, sleep_and_retry_async )
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
sleep_and_retry_async,
)
class AnthropicAdapter(LLMInterface): class AnthropicAdapter(LLMInterface):

View file

@ -6,7 +6,9 @@ import time
import asyncio import asyncio
import random import random
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
logger = get_logger() logger = get_logger()

View file

@ -6,8 +6,12 @@ from litellm import acompletion, JSONSchemaValidationError
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
from cognee.modules.observability.get_observe import get_observe from cognee.modules.observability.get_observe import get_observe
from cognee.exceptions import InvalidValueError from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import ( from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async, rate_limit_async,
sleep_and_retry_async, sleep_and_retry_async,

View file

@ -4,8 +4,13 @@ from typing import Type
from pydantic import BaseModel from pydantic import BaseModel
import instructor import instructor
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import rate_limit_async, sleep_and_retry_async LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
sleep_and_retry_async,
)
import litellm import litellm

View file

@ -4,7 +4,9 @@ from enum import Enum
from cognee.exceptions import InvalidValueError from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm import get_llm_config from cognee.infrastructure.llm import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.ollama.adapter import OllamaAPIAdapter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.ollama.adapter import (
OllamaAPIAdapter,
)
# Define an Enum for LLM Providers # Define an Enum for LLM Providers
@ -59,7 +61,9 @@ def get_llm_client():
if llm_config.llm_api_key is None: if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.") raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.openai.adapter import OpenAIAdapter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.openai.adapter import (
OpenAIAdapter,
)
return OpenAIAdapter( return OpenAIAdapter(
api_key=llm_config.llm_api_key, api_key=llm_config.llm_api_key,
@ -75,7 +79,9 @@ def get_llm_client():
if llm_config.llm_api_key is None: if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.") raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import GenericAPIAdapter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import (
GenericAPIAdapter,
)
return OllamaAPIAdapter( return OllamaAPIAdapter(
llm_config.llm_endpoint, llm_config.llm_endpoint,
@ -86,7 +92,9 @@ def get_llm_client():
) )
elif provider == LLMProvider.ANTHROPIC: elif provider == LLMProvider.ANTHROPIC:
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.anthropic import AnthropicAdapter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.anthropic import (
AnthropicAdapter,
)
return AnthropicAdapter(max_tokens=max_tokens, model=llm_config.llm_model) return AnthropicAdapter(max_tokens=max_tokens, model=llm_config.llm_model)
@ -94,7 +102,9 @@ def get_llm_client():
if llm_config.llm_api_key is None: if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.") raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import GenericAPIAdapter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import (
GenericAPIAdapter,
)
return GenericAPIAdapter( return GenericAPIAdapter(
llm_config.llm_endpoint, llm_config.llm_endpoint,
@ -108,7 +118,9 @@ def get_llm_client():
if llm_config.llm_api_key is None: if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.") raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.gemini import GeminiAdapter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.gemini import (
GeminiAdapter,
)
return GeminiAdapter( return GeminiAdapter(
api_key=llm_config.llm_api_key, api_key=llm_config.llm_api_key,

View file

@ -3,7 +3,9 @@
from typing import Type, Protocol from typing import Type, Protocol
from abc import abstractmethod from abc import abstractmethod
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
class LLMInterface(Protocol): class LLMInterface(Protocol):

View file

@ -1,7 +1,9 @@
from typing import Type from typing import Type
from pydantic import BaseModel from pydantic import BaseModel
import instructor import instructor
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import ( from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async, rate_limit_async,
rate_limit_sync, rate_limit_sync,

View file

@ -7,8 +7,12 @@ from pydantic import BaseModel
from cognee.modules.data.processing.document_types.open_data_file import open_data_file from cognee.modules.data.processing.document_types.open_data_file import open_data_file
from cognee.exceptions import InvalidValueError from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import ( from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async, rate_limit_async,
rate_limit_sync, rate_limit_sync,

View file

@ -49,7 +49,9 @@ from functools import wraps
from limits import RateLimitItemPerMinute, storage from limits import RateLimitItemPerMinute, storage
from limits.strategies import MovingWindowRateLimiter from limits.strategies import MovingWindowRateLimiter
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
logger = get_logger() logger = get_logger()

View file

@ -24,7 +24,9 @@ class GeminiTokenizer(TokenizerInterface):
# Get LLM API key from config # Get LLM API key from config
from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
config = get_embedding_config() config = get_embedding_config()
llm_config = get_llm_config() llm_config = get_llm_config()

View file

@ -1,6 +1,8 @@
import litellm import litellm
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
logger = get_logger() logger = get_logger()

View file

@ -1,4 +1,6 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.chunking.Chunker import Chunker from cognee.modules.chunking.Chunker import Chunker
from .Document import Document from .Document import Document

View file

@ -1,4 +1,6 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.chunking.Chunker import Chunker from cognee.modules.chunking.Chunker import Chunker
from .Document import Document from .Document import Document

View file

@ -69,7 +69,10 @@ async def cognee_pipeline(
cognee_pipeline.first_run = True cognee_pipeline.first_run = True
if cognee_pipeline.first_run: if cognee_pipeline.first_run:
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import test_llm_connection, test_embedding_connection from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
test_llm_connection,
test_embedding_connection,
)
# Test LLM and Embedding configuration once before running Cognee # Test LLM and Embedding configuration once before running Cognee
await test_llm_connection() await test_llm_connection()

View file

@ -6,8 +6,12 @@ from pydantic import BaseModel
from cognee.modules.retrieval.base_retriever import BaseRetriever from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.vector import get_vector_engine from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
class CodeRetriever(BaseRetriever): class CodeRetriever(BaseRetriever):

View file

@ -1,9 +1,14 @@
from typing import Any, Optional, List, Type from typing import Any, Optional, List, Type
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever
from cognee.modules.retrieval.utils.completion import generate_completion from cognee.modules.retrieval.utils.completion import generate_completion
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
logger = get_logger() logger = get_logger()

View file

@ -2,8 +2,12 @@ from typing import Any, Optional
import logging import logging
from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.graph.networkx.adapter import NetworkXAdapter from cognee.infrastructure.databases.graph.networkx.adapter import NetworkXAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
)
from cognee.modules.retrieval.base_retriever import BaseRetriever from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.modules.retrieval.exceptions import SearchTypeNotSupported from cognee.modules.retrieval.exceptions import SearchTypeNotSupported
from cognee.infrastructure.databases.graph.graph_db_interface import GraphDBInterface from cognee.infrastructure.databases.graph.graph_db_interface import GraphDBInterface

View file

@ -1,5 +1,10 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
async def generate_completion( async def generate_completion(

View file

@ -9,7 +9,9 @@ from cognee.modules.users.methods import get_default_user
from cognee.modules.users.models import User from cognee.modules.users.models import User
from cognee.shared.utils import send_telemetry from cognee.shared.utils import send_telemetry
from cognee.modules.search.methods import search from cognee.modules.search.methods import search
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
logger = get_logger(level=ERROR) logger = get_logger(level=ERROR)

View file

@ -4,7 +4,9 @@ from enum import Enum, auto
from typing import Any, Dict, List, Optional, Union from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
if get_llm_config().llm_provider.lower() == "gemini": if get_llm_config().llm_provider.lower() == "gemini":
""" """

View file

@ -7,7 +7,9 @@ from pydantic import BaseModel
from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.vector import get_vector_engine from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.engine.models import DataPoint from cognee.infrastructure.engine.models import DataPoint
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction.extract_categories import extract_categories from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction.extract_categories import (
extract_categories,
)
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk from cognee.modules.chunking.models.DocumentChunk import DocumentChunk

View file

@ -6,8 +6,13 @@ from pydantic import BaseModel
from cognee.infrastructure.entities.BaseEntityExtractor import BaseEntityExtractor from cognee.infrastructure.entities.BaseEntityExtractor import BaseEntityExtractor
from cognee.modules.engine.models import Entity from cognee.modules.engine.models import Entity
from cognee.modules.engine.models.EntityType import EntityType from cognee.modules.engine.models.EntityType import EntityType
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client read_query_prompt,
render_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
logger = get_logger("llm_entity_extractor") logger = get_logger("llm_entity_extractor")

View file

@ -1,8 +1,13 @@
from typing import List, Tuple from typing import List, Tuple
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt, read_query_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
read_query_prompt,
)
from cognee.root_dir import get_absolute_path from cognee.root_dir import get_absolute_path

View file

@ -1,6 +1,11 @@
from typing import List from typing import List
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt, read_query_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
read_query_prompt,
)
from cognee.shared.data_models import KnowledgeGraph from cognee.shared.data_models import KnowledgeGraph
from cognee.root_dir import get_absolute_path from cognee.root_dir import get_absolute_path

View file

@ -1,8 +1,13 @@
from typing import List from typing import List
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt, read_query_prompt get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
read_query_prompt,
)
from cognee.root_dir import get_absolute_path from cognee.root_dir import get_absolute_path

View file

@ -1,19 +1,26 @@
import asyncio import asyncio
from typing import Type, List from typing import Type, List
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_content_graph from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_content_graph,
)
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.tasks.storage import add_data_points from cognee.tasks.storage import add_data_points
from cognee.base_config import get_base_config from cognee.base_config import get_base_config
base = get_base_config() base = get_base_config()
if base.structured_output_framework == 'BAML': if base.structured_output_framework == "BAML":
print(f"Using BAML framework: {base.structured_output_framework}") print(f"Using BAML framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_content_graph from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_content_graph,
)
else: else:
print(f"Using llitellm_instructor framework: {base.structured_output_framework}") print(f"Using llitellm_instructor framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_content_graph from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_content_graph,
)
async def extract_graph_from_code( async def extract_graph_from_code(
data_chunks: list[DocumentChunk], graph_model: Type[BaseModel] data_chunks: list[DocumentChunk], graph_model: Type[BaseModel]

View file

@ -7,13 +7,18 @@ from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.base_config import get_base_config from cognee.base_config import get_base_config
base = get_base_config() base = get_base_config()
if base.structured_output_framework == 'BAML': if base.structured_output_framework == "BAML":
print(f"Using BAML framework: {base.structured_output_framework}") print(f"Using BAML framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_content_graph from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_content_graph,
)
else: else:
print(f"Using llitellm_instructor framework: {base.structured_output_framework}") print(f"Using llitellm_instructor framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_content_graph from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_content_graph,
)
from cognee.modules.graph.utils import ( from cognee.modules.graph.utils import (
expand_with_nodes_and_edges, expand_with_nodes_and_edges,

View file

@ -15,8 +15,12 @@ from pydantic import BaseModel
from cognee.modules.graph.exceptions import EntityNotFoundError from cognee.modules.graph.exceptions import EntityNotFoundError
from cognee.modules.ingestion.exceptions import IngestionError from cognee.modules.ingestion.exceptions import IngestionError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.data.chunking.config import get_chunk_config from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.infrastructure.data.chunking.get_chunking_engine import get_chunk_engine from cognee.infrastructure.data.chunking.get_chunking_engine import get_chunk_engine
from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine

View file

@ -6,12 +6,18 @@ from cognee.infrastructure.engine import DataPoint
from cognee.base_config import get_base_config from cognee.base_config import get_base_config
base = get_base_config() base = get_base_config()
if base.structured_output_framework == 'BAML': if base.structured_output_framework == "BAML":
print(f"Using BAML framework for code summarization: {base.structured_output_framework}") print(f"Using BAML framework for code summarization: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_code_summary from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_code_summary,
)
else: else:
print(f"Using llitellm_instructor framework for code summarization: {base.structured_output_framework}") print(
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_code_summary f"Using llitellm_instructor framework for code summarization: {base.structured_output_framework}"
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_code_summary,
)
from .models import CodeSummary from .models import CodeSummary

View file

@ -5,12 +5,18 @@ from pydantic import BaseModel
from cognee.base_config import get_base_config from cognee.base_config import get_base_config
base = get_base_config() base = get_base_config()
if base.structured_output_framework == 'BAML': if base.structured_output_framework == "BAML":
print(f"Using BAML framework for text summarization: {base.structured_output_framework}") print(f"Using BAML framework for text summarization: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_summary from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_summary,
)
else: else:
print(f"Using llitellm_instructor framework for text summarization: {base.structured_output_framework}") print(
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_summary f"Using llitellm_instructor framework for text summarization: {base.structured_output_framework}"
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_summary,
)
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.modules.cognify.config import get_cognify_config from cognee.modules.cognify.config import get_cognify_config

View file

@ -3,8 +3,12 @@ import time
import asyncio import asyncio
import logging import logging
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import EmbeddingRateLimiter get_llm_config,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import (
EmbeddingRateLimiter,
)
from cognee.tests.unit.infrastructure.mock_embedding_engine import MockEmbeddingEngine from cognee.tests.unit.infrastructure.mock_embedding_engine import MockEmbeddingEngine
# Configure logging # Configure logging

View file

@ -2,8 +2,12 @@ import asyncio
import os import os
from unittest.mock import patch from unittest.mock import patch
from cognee.shared.logging_utils import get_logger from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import llm_rate_limiter from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config llm_rate_limiter,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
async def test_rate_limiting_realistic(): async def test_rate_limiting_realistic():

View file

@ -3,7 +3,9 @@ import logging
import cognee import cognee
import asyncio import asyncio
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from dotenv import load_dotenv from dotenv import load_dotenv
from cognee.api.v1.search import SearchType from cognee.api.v1.search import SearchType
from cognee.modules.engine.models import NodeSet from cognee.modules.engine.models import NodeSet

View file

@ -12,8 +12,13 @@ from cognee.tasks.temporal_awareness.index_graphiti_objects import (
) )
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client read_query_prompt,
render_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.users.methods import get_default_user from cognee.modules.users.methods import get_default_user
text_list = [ text_list = [