ruff format

This commit is contained in:
vasilije 2025-07-06 18:05:06 +02:00
parent ea035a1bce
commit 53b8df8506
61 changed files with 1619 additions and 610 deletions

View file

@ -2,9 +2,13 @@ from uuid import NAMESPACE_OID, uuid5
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
)
from cognee.low_level import DataPoint
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.shared.logging_utils import get_logger
from cognee.modules.engine.models import NodeSet
from cognee.tasks.storage import add_data_points, index_graph_edges

View file

@ -7,7 +7,9 @@ from cognee.modules.cognify.config import get_cognify_config
from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.infrastructure.databases.vector import get_vectordb_config
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from cognee.infrastructure.databases.relational import get_relational_config, get_migration_config
from cognee.infrastructure.files.storage import LocalStorage

View file

@ -17,7 +17,9 @@ from cognee.api.v1.responses.models import (
)
from cognee.api.v1.responses.dispatch_function import dispatch_function
from cognee.api.v1.responses.default_tools import DEFAULT_TOOLS
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from cognee.modules.users.models import User
from cognee.modules.users.methods import get_authenticated_user

View file

@ -1,8 +1,13 @@
from typing import Any, Dict, List
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.eval_framework.evaluation.base_eval_adapter import BaseEvalAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
from cognee.eval_framework.eval_config import EvalConfig

View file

@ -5,7 +5,9 @@ import litellm
import os
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import TikTokenTokenizer
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import (
TikTokenTokenizer,
)
litellm.set_verbose = False
logger = get_logger("FastembedEmbeddingEngine")

View file

@ -7,10 +7,18 @@ import litellm
import os
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.Gemini import GeminiTokenizer
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import HuggingFaceTokenizer
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer import MistralTokenizer
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import TikTokenTokenizer
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.Gemini import (
GeminiTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import (
HuggingFaceTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer import (
MistralTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.TikToken import (
TikTokenTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import (
embedding_rate_limit_async,
embedding_sleep_and_retry_async,

View file

@ -7,7 +7,9 @@ import os
import aiohttp.http_exceptions
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import HuggingFaceTokenizer
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.tokenizer.HuggingFace import (
HuggingFaceTokenizer,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import (
embedding_rate_limit_async,
embedding_sleep_and_retry_async,

View file

@ -1,5 +1,7 @@
from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from .EmbeddingEngine import EmbeddingEngine
from functools import lru_cache

View file

@ -1,4 +1,12 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import get_max_chunk_tokens
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import test_llm_connection
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import test_embedding_connection
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
get_max_chunk_tokens,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
test_llm_connection,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
test_embedding_connection,
)

View file

@ -13,9 +13,9 @@
__version__ = "0.201.0"
try:
from baml_py.safe_import import EnsureBamlPyImport
from baml_py.safe_import import EnsureBamlPyImport
except ImportError:
raise ImportError(f"""Update to baml-py required.
raise ImportError(f"""Update to baml-py required.
Version of baml_client generator (see generators.baml): {__version__}
Please upgrade baml-py to version "{__version__}".
@ -31,16 +31,15 @@ https://boundaryml.com/discord
with EnsureBamlPyImport(__version__) as e:
e.raise_if_incompatible_version(__version__)
e.raise_if_incompatible_version(__version__)
from . import types
from . import tracing
from . import stream_types
from . import config
from .config import reset_baml_env_vars
from .sync_client import b
from . import types
from . import tracing
from . import stream_types
from . import config
from .config import reset_baml_env_vars
from .sync_client import b
# FOR LEGACY COMPATIBILITY, expose "partial_types" as an alias for "stream_types"
@ -48,11 +47,11 @@ with EnsureBamlPyImport(__version__) as e:
partial_types = stream_types
__all__ = [
"b",
"stream_types",
"partial_types",
"tracing",
"types",
"reset_baml_env_vars",
"config",
]
"b",
"stream_types",
"partial_types",
"tracing",
"types",
"reset_baml_env_vars",
"config",
]

View file

@ -36,10 +36,13 @@ class BamlAsyncClient:
self.__llm_response_parser = LlmResponseParser(options)
self.__llm_stream_parser = LlmStreamParser(options)
def with_options(self,
def with_options(
self,
tb: typing.Optional[type_builder.TypeBuilder] = None,
client_registry: typing.Optional[baml_py.baml_py.ClientRegistry] = None,
collector: typing.Optional[typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]] = None,
collector: typing.Optional[
typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]
] = None,
env: typing.Optional[typing.Dict[str, typing.Optional[str]]] = None,
) -> "BamlAsyncClient":
options: BamlCallOptions = {}
@ -55,67 +58,151 @@ class BamlAsyncClient:
@property
def stream(self):
return self.__stream_client
return self.__stream_client
@property
def request(self):
return self.__http_request
return self.__http_request
@property
def stream_request(self):
return self.__http_stream_request
return self.__http_stream_request
@property
def parse(self):
return self.__llm_response_parser
return self.__llm_response_parser
@property
def parse_stream(self):
return self.__llm_stream_parser
async def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
return self.__llm_stream_parser
async def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraph", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
async def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
result = await self.__options.merge_options(baml_options).call_function_async(
function_name="ExtractContentGraph",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
async def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
result = await self.__options.merge_options(baml_options).call_function_async(
function_name="ExtractContentGraphGeneric",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
async def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
result = await self.__options.merge_options(baml_options).call_function_async(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
async def SummarizeCode(self, content: str,
result = await self.__options.merge_options(baml_options).call_function_async(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedCode:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="SummarizeCode", args={
"content": content,
})
return typing.cast(types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__))
async def SummarizeContent(self, content: str,
result = await self.__options.merge_options(baml_options).call_function_async(
function_name="SummarizeCode",
args={
"content": content,
},
)
return typing.cast(
types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__)
)
async def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedContent:
result = await self.__options.merge_options(baml_options).call_function_async(function_name="SummarizeContent", args={
"content": content,
})
return typing.cast(types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__))
result = await self.__options.merge_options(baml_options).call_function_async(
function_name="SummarizeContent",
args={
"content": content,
},
)
return typing.cast(
types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__)
)
class BamlStreamClient:
@ -124,79 +211,182 @@ class BamlStreamClient:
def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraph", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="ExtractContentGraph",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="ExtractContentGraphGeneric",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
})
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
)
def SummarizeCode(self, content: str,
return baml_py.BamlStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="SummarizeCode", args={
"content": content,
})
return baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode](
result,
lambda x: typing.cast(stream_types.SummarizedCode, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="SummarizeCode",
args={
"content": content,
},
)
def SummarizeContent(self, content: str,
return baml_py.BamlStream[stream_types.SummarizedCode, types.SummarizedCode](
result,
lambda x: typing.cast(
stream_types.SummarizedCode,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent]:
ctx, result = self.__options.merge_options(baml_options).create_async_stream(function_name="SummarizeContent", args={
"content": content,
})
return baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent](
result,
lambda x: typing.cast(stream_types.SummarizedContent, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_async_stream(
function_name="SummarizeContent",
args={
"content": content,
},
)
return baml_py.BamlStream[stream_types.SummarizedContent, types.SummarizedContent](
result,
lambda x: typing.cast(
stream_types.SummarizedContent,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
class BamlHttpRequestClient:
__options: DoNotUseDirectlyCallManager
@ -204,49 +394,128 @@ class BamlHttpRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options
async def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
async def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraph", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraph",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
async def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
async def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphGeneric",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
async def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
async def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="request")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="request",
)
return result
async def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
async def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
}, mode="request")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
mode="request",
)
return result
async def SummarizeCode(self, content: str,
async def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeCode", args={
"content": content,
}, mode="request")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeCode",
args={
"content": content,
},
mode="request",
)
return result
async def SummarizeContent(self, content: str,
async def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeContent", args={
"content": content,
}, mode="request")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeContent",
args={
"content": content,
},
mode="request",
)
return result
class BamlHttpStreamRequestClient:
__options: DoNotUseDirectlyCallManager
@ -254,48 +523,127 @@ class BamlHttpStreamRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options
async def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
async def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraph", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraph",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
async def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
return result
async def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
}, mode="stream")
return result
async def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
}, mode="stream")
return result
async def SummarizeCode(self, content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeCode", args={
"content": content,
}, mode="stream")
return result
async def SummarizeContent(self, content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(function_name="SummarizeContent", args={
"content": content,
}, mode="stream")
return result
b = BamlAsyncClient(DoNotUseDirectlyCallManager({}))
async def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphGeneric",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
async def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
mode="stream",
)
return result
async def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
mode="stream",
)
return result
async def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeCode",
args={
"content": content,
},
mode="stream",
)
return result
async def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
result = await self.__options.merge_options(baml_options).create_http_request_async(
function_name="SummarizeContent",
args={
"content": content,
},
mode="stream",
)
return result
b = BamlAsyncClient(DoNotUseDirectlyCallManager({}))

View file

@ -19,17 +19,19 @@ from .inlinedbaml import get_baml_files
from typing import Dict
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME = BamlRuntime.from_files(
"baml_src",
get_baml_files(),
os.environ.copy()
"baml_src", get_baml_files(), os.environ.copy()
)
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX = BamlCtxManager(DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME)
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX = BamlCtxManager(
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME
)
def reset_baml_env_vars(env_vars: Dict[str, str]):
warnings.warn(
"reset_baml_env_vars is deprecated and should be removed. Environment variables are now lazily loaded on each function call",
DeprecationWarning,
stacklevel=2
stacklevel=2,
)
__all__ = []

File diff suppressed because one or more lines are too long

View file

@ -16,6 +16,7 @@ import typing_extensions
from . import stream_types, types
from .runtime import DoNotUseDirectlyCallManager, BamlCallOptions
class LlmResponseParser:
__options: DoNotUseDirectlyCallManager
@ -23,42 +24,69 @@ class LlmResponseParser:
self.__options = options
def ExtractContentGraph(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraph", llm_response=llm_response, mode="request")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraph", llm_response=llm_response, mode="request"
)
return typing.cast(types.KnowledgeGraph, result)
def ExtractContentGraphGeneric(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="request")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="request"
)
return typing.cast(types.KnowledgeGraph, result)
def ExtractContentGraphWithAnthropic(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithAnthropic", llm_response=llm_response, mode="request")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithAnthropic",
llm_response=llm_response,
mode="request",
)
return typing.cast(types.KnowledgeGraph, result)
def ExtractContentGraphWithEnvPrompt(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithEnvPrompt", llm_response=llm_response, mode="request")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithEnvPrompt",
llm_response=llm_response,
mode="request",
)
return typing.cast(types.KnowledgeGraph, result)
def SummarizeCode(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedCode:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeCode", llm_response=llm_response, mode="request")
result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeCode", llm_response=llm_response, mode="request"
)
return typing.cast(types.SummarizedCode, result)
def SummarizeContent(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedContent:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeContent", llm_response=llm_response, mode="request")
result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeContent", llm_response=llm_response, mode="request"
)
return typing.cast(types.SummarizedContent, result)
class LlmStreamParser:
__options: DoNotUseDirectlyCallManager
@ -67,39 +95,65 @@ class LlmStreamParser:
self.__options = options
def ExtractContentGraph(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraph", llm_response=llm_response, mode="stream")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraph", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.KnowledgeGraph, result)
def ExtractContentGraphGeneric(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="stream")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphGeneric", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.KnowledgeGraph, result)
def ExtractContentGraphWithAnthropic(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithAnthropic", llm_response=llm_response, mode="stream")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithAnthropic",
llm_response=llm_response,
mode="stream",
)
return typing.cast(stream_types.KnowledgeGraph, result)
def ExtractContentGraphWithEnvPrompt(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).parse_response(function_name="ExtractContentGraphWithEnvPrompt", llm_response=llm_response, mode="stream")
result = self.__options.merge_options(baml_options).parse_response(
function_name="ExtractContentGraphWithEnvPrompt",
llm_response=llm_response,
mode="stream",
)
return typing.cast(stream_types.KnowledgeGraph, result)
def SummarizeCode(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.SummarizedCode:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeCode", llm_response=llm_response, mode="stream")
result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeCode", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.SummarizedCode, result)
def SummarizeContent(
self, llm_response: str, baml_options: BamlCallOptions = {},
self,
llm_response: str,
baml_options: BamlCallOptions = {},
) -> stream_types.SummarizedContent:
result = self.__options.merge_options(baml_options).parse_response(function_name="SummarizeContent", llm_response=llm_response, mode="stream")
result = self.__options.merge_options(baml_options).parse_response(
function_name="SummarizeContent", llm_response=llm_response, mode="stream"
)
return typing.cast(stream_types.SummarizedContent, result)

View file

@ -17,7 +17,10 @@ import typing_extensions
import baml_py
from . import types, stream_types, type_builder
from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__, DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX as __ctx__manager__
from .globals import (
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__,
DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX as __ctx__manager__,
)
class BamlCallOptions(typing.TypedDict, total=False):
@ -48,9 +51,6 @@ class _ResolvedBamlOptions:
self.env_vars = env_vars
class DoNotUseDirectlyCallManager:
def __init__(self, baml_options: BamlCallOptions):
self.__baml_options = baml_options
@ -74,7 +74,9 @@ class DoNotUseDirectlyCallManager:
collectors_as_list = (
collector
if isinstance(collector, list)
else [collector] if collector is not None else []
else [collector]
if collector is not None
else []
)
env_vars = os.environ.copy()
for k, v in self.__baml_options.get("env", {}).items():
@ -164,7 +166,9 @@ class DoNotUseDirectlyCallManager:
*,
function_name: str,
args: typing.Dict[str, typing.Any],
) -> typing.Tuple[baml_py.baml_py.RuntimeContextManager, baml_py.baml_py.SyncFunctionResultStream]:
) -> typing.Tuple[
baml_py.baml_py.RuntimeContextManager, baml_py.baml_py.SyncFunctionResultStream
]:
resolved_options = self.__resolve()
ctx = __ctx__manager__.get()
result = __runtime__.stream_function_sync(
@ -172,7 +176,7 @@ class DoNotUseDirectlyCallManager:
args,
# this is always None, we set this later!
# on_event
None,
None,
# ctx
ctx,
# tb
@ -232,7 +236,13 @@ class DoNotUseDirectlyCallManager:
mode == "stream",
)
def parse_response(self, *, function_name: str, llm_response: str, mode: typing_extensions.Literal["stream", "request"]) -> typing.Any:
def parse_response(
self,
*,
function_name: str,
llm_response: str,
mode: typing_extensions.Literal["stream", "request"],
) -> typing.Any:
resolved_options = self.__resolve()
return __runtime__.parse_llm_response(
function_name,
@ -253,4 +263,4 @@ class DoNotUseDirectlyCallManager:
resolved_options.client_registry,
# env_vars
resolved_options.env_vars,
)
)

View file

@ -18,14 +18,19 @@ import baml_py
from . import types
StreamStateValueT = typing.TypeVar('StreamStateValueT')
StreamStateValueT = typing.TypeVar("StreamStateValueT")
class StreamState(BaseModel, typing.Generic[StreamStateValueT]):
value: StreamStateValueT
state: typing_extensions.Literal["Pending", "Incomplete", "Complete"]
# #########################################################################
# Generated classes (7)
# #########################################################################
class Edge(BaseModel):
# doc string for edge
# doc string for source_node_id
@ -34,23 +39,27 @@ class Edge(BaseModel):
target_node_id: typing.Optional[str] = None
relationship_name: typing.Optional[str] = None
class KnowledgeGraph(BaseModel):
nodes: typing.List["types.Node"]
edges: typing.List["Edge"]
class Node(BaseModel):
model_config = ConfigDict(extra='allow')
model_config = ConfigDict(extra="allow")
id: typing.Optional[str] = None
name: typing.Optional[str] = None
type: typing.Optional[str] = None
description: typing.Optional[str] = None
class SummarizedClass(BaseModel):
name: typing.Optional[str] = None
description: typing.Optional[str] = None
methods: typing.Optional[typing.List["SummarizedFunction"]] = None
decorators: typing.Optional[typing.List[str]] = None
class SummarizedCode(BaseModel):
high_level_summary: typing.Optional[str] = None
key_features: typing.List[str]
@ -60,10 +69,12 @@ class SummarizedCode(BaseModel):
functions: typing.List["SummarizedFunction"]
workflow_description: typing.Optional[str] = None
class SummarizedContent(BaseModel):
summary: typing.Optional[str] = None
description: typing.Optional[str] = None
class SummarizedFunction(BaseModel):
name: typing.Optional[str] = None
description: typing.Optional[str] = None
@ -71,6 +82,7 @@ class SummarizedFunction(BaseModel):
outputs: typing.Optional[typing.List[str]] = None
decorators: typing.Optional[typing.List[str]] = None
# #########################################################################
# Generated type aliases (0)
# #########################################################################

View file

@ -19,6 +19,7 @@ from .parser import LlmResponseParser, LlmStreamParser
from .runtime import DoNotUseDirectlyCallManager, BamlCallOptions
from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME as __runtime__
class BamlSyncClient:
__options: DoNotUseDirectlyCallManager
__stream_client: "BamlStreamClient"
@ -48,10 +49,13 @@ class BamlSyncClient:
self.__llm_response_parser = LlmResponseParser(self.__options)
self.__llm_stream_parser = LlmStreamParser(self.__options)
def with_options(self,
def with_options(
self,
tb: typing.Optional[type_builder.TypeBuilder] = None,
client_registry: typing.Optional[baml_py.baml_py.ClientRegistry] = None,
collector: typing.Optional[typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]] = None,
collector: typing.Optional[
typing.Union[baml_py.baml_py.Collector, typing.List[baml_py.baml_py.Collector]]
] = None,
env: typing.Optional[typing.Dict[str, typing.Optional[str]]] = None,
) -> "BamlSyncClient":
options: BamlCallOptions = {}
@ -67,67 +71,151 @@ class BamlSyncClient:
@property
def stream(self):
return self.__stream_client
return self.__stream_client
@property
def request(self):
return self.__http_request
return self.__http_request
@property
def stream_request(self):
return self.__http_stream_request
return self.__http_stream_request
@property
def parse(self):
return self.__llm_response_parser
return self.__llm_response_parser
@property
def parse_stream(self):
return self.__llm_stream_parser
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
return self.__llm_stream_parser
def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraph", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
result = self.__options.merge_options(baml_options).call_function_sync(
function_name="ExtractContentGraph",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
result = self.__options.merge_options(baml_options).call_function_sync(
function_name="ExtractContentGraphGeneric",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
result = self.__options.merge_options(baml_options).call_function_sync(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> types.KnowledgeGraph:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
})
return typing.cast(types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__))
def SummarizeCode(self, content: str,
result = self.__options.merge_options(baml_options).call_function_sync(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
)
return typing.cast(
types.KnowledgeGraph, result.cast_to(types, types, stream_types, False, __runtime__)
)
def SummarizeCode(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedCode:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="SummarizeCode", args={
"content": content,
})
return typing.cast(types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__))
def SummarizeContent(self, content: str,
result = self.__options.merge_options(baml_options).call_function_sync(
function_name="SummarizeCode",
args={
"content": content,
},
)
return typing.cast(
types.SummarizedCode, result.cast_to(types, types, stream_types, False, __runtime__)
)
def SummarizeContent(
self,
content: str,
baml_options: BamlCallOptions = {},
) -> types.SummarizedContent:
result = self.__options.merge_options(baml_options).call_function_sync(function_name="SummarizeContent", args={
"content": content,
})
return typing.cast(types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__))
result = self.__options.merge_options(baml_options).call_function_sync(
function_name="SummarizeContent",
args={
"content": content,
},
)
return typing.cast(
types.SummarizedContent, result.cast_to(types, types, stream_types, False, __runtime__)
)
class BamlStreamClient:
@ -136,79 +224,182 @@ class BamlStreamClient:
def __init__(self, options: DoNotUseDirectlyCallManager):
self.__options = options
def ExtractContentGraph(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
def ExtractContentGraph(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraph", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
function_name="ExtractContentGraph",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
def ExtractContentGraphGeneric(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def ExtractContentGraphGeneric(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraphGeneric", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
function_name="ExtractContentGraphGeneric",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
def ExtractContentGraphWithAnthropic(self, content: str,mode: typing.Optional[typing.Union[typing_extensions.Literal['simple'], typing_extensions.Literal['base'], typing_extensions.Literal['guided'], typing_extensions.Literal['strict'], typing_extensions.Literal['custom']]] = None,custom_prompt_content: typing.Optional[str] = None,
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def ExtractContentGraphWithAnthropic(
self,
content: str,
mode: typing.Optional[
typing.Union[
typing_extensions.Literal["simple"],
typing_extensions.Literal["base"],
typing_extensions.Literal["guided"],
typing_extensions.Literal["strict"],
typing_extensions.Literal["custom"],
]
] = None,
custom_prompt_content: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraphWithAnthropic", args={
"content": content,"mode": mode,"custom_prompt_content": custom_prompt_content,
})
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
function_name="ExtractContentGraphWithAnthropic",
args={
"content": content,
"mode": mode,
"custom_prompt_content": custom_prompt_content,
},
)
def ExtractContentGraphWithEnvPrompt(self, content: str,prompt_override: typing.Optional[str] = None,
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def ExtractContentGraphWithEnvPrompt(
self,
content: str,
prompt_override: typing.Optional[str] = None,
baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph]:
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(function_name="ExtractContentGraphWithEnvPrompt", args={
"content": content,"prompt_override": prompt_override,
})
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(stream_types.KnowledgeGraph, x.cast_to(types, types, stream_types, True, __runtime__)),
lambda x: typing.cast(types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)),
ctx,
ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
function_name="ExtractContentGraphWithEnvPrompt",
args={
"content": content,
"prompt_override": prompt_override,
},
)
def SummarizeCode(self, content: str,
return baml_py.BamlSyncStream[stream_types.KnowledgeGraph, types.KnowledgeGraph](
result,
lambda x: typing.cast(
stream_types.KnowledgeGraph,
x.cast_to(types, types, stream_types, True, __runtime__),
),
lambda x: typing.cast(
types.KnowledgeGraph, x.cast_to(types, types, stream_types, False, __runtime__)
),
ctx,
)
def SummarizeCode(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.SummarizedCode, types.SummarizedCode]:
    """Start a synchronous stream of the SummarizeCode BAML function.

    Args:
        content: Source code to summarize.
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        A BamlSyncStream yielding partial (stream_types.SummarizedCode) values and a
        final types.SummarizedCode.
    """
    ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
        function_name="SummarizeCode",
        args={
            "content": content,
        },
    )
    return baml_py.BamlSyncStream[stream_types.SummarizedCode, types.SummarizedCode](
        result,
        lambda x: typing.cast(
            stream_types.SummarizedCode,
            x.cast_to(types, types, stream_types, True, __runtime__),
        ),
        lambda x: typing.cast(
            types.SummarizedCode, x.cast_to(types, types, stream_types, False, __runtime__)
        ),
        ctx,
    )
def SummarizeContent(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.BamlSyncStream[stream_types.SummarizedContent, types.SummarizedContent]:
    """Start a synchronous stream of the SummarizeContent BAML function.

    Args:
        content: Text content to summarize.
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        A BamlSyncStream yielding partial (stream_types.SummarizedContent) values and a
        final types.SummarizedContent.
    """
    ctx, result = self.__options.merge_options(baml_options).create_sync_stream(
        function_name="SummarizeContent",
        args={
            "content": content,
        },
    )
    return baml_py.BamlSyncStream[stream_types.SummarizedContent, types.SummarizedContent](
        result,
        lambda x: typing.cast(
            stream_types.SummarizedContent,
            x.cast_to(types, types, stream_types, True, __runtime__),
        ),
        lambda x: typing.cast(
            types.SummarizedContent, x.cast_to(types, types, stream_types, False, __runtime__)
        ),
        ctx,
    )
class BamlHttpRequestClient:
__options: DoNotUseDirectlyCallManager
@ -216,49 +407,128 @@ class BamlHttpRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager):
    """Store the call manager used to build HTTP requests for each generated function."""
    self.__options = options
def ExtractContentGraph(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the HTTP request for the ExtractContentGraph BAML function.

    Args:
        content: Text to extract a knowledge graph from.
        mode: Extraction mode; one of "simple"/"base"/"guided"/"strict"/"custom".
        custom_prompt_content: Prompt body used when mode is "custom".
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (non-streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraph",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="request",
    )
    return result
def ExtractContentGraphGeneric(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the HTTP request for the ExtractContentGraphGeneric BAML function.

    Args:
        content: Text to extract a knowledge graph from.
        mode: Extraction mode; one of "simple"/"base"/"guided"/"strict"/"custom".
        custom_prompt_content: Prompt body used when mode is "custom".
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (non-streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraphGeneric",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="request",
    )
    return result
def ExtractContentGraphWithAnthropic(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the HTTP request for the ExtractContentGraphWithAnthropic BAML function.

    Args:
        content: Text to extract a knowledge graph from.
        mode: Extraction mode; one of "simple"/"base"/"guided"/"strict"/"custom".
        custom_prompt_content: Prompt body used when mode is "custom".
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (non-streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraphWithAnthropic",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="request",
    )
    return result
def ExtractContentGraphWithEnvPrompt(
    self,
    content: str,
    prompt_override: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the HTTP request for the ExtractContentGraphWithEnvPrompt BAML function.

    Args:
        content: Text to extract a knowledge graph from.
        prompt_override: Optional prompt text overriding the env-configured prompt.
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (non-streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraphWithEnvPrompt",
        args={
            "content": content,
            "prompt_override": prompt_override,
        },
        mode="request",
    )
    return result
def SummarizeCode(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the HTTP request for the SummarizeCode BAML function.

    Args:
        content: Source code to summarize.
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (non-streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="SummarizeCode",
        args={
            "content": content,
        },
        mode="request",
    )
    return result
def SummarizeContent(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the HTTP request for the SummarizeContent BAML function.

    Args:
        content: Text content to summarize.
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (non-streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="SummarizeContent",
        args={
            "content": content,
        },
        mode="request",
    )
    return result
class BamlHttpStreamRequestClient:
__options: DoNotUseDirectlyCallManager
@ -266,48 +536,127 @@ class BamlHttpStreamRequestClient:
def __init__(self, options: DoNotUseDirectlyCallManager):
    """Store the call manager used to build streaming HTTP requests for each generated function."""
    self.__options = options
def ExtractContentGraph(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the streaming-mode HTTP request for the ExtractContentGraph BAML function.

    Args:
        content: Text to extract a knowledge graph from.
        mode: Extraction mode; one of "simple"/"base"/"guided"/"strict"/"custom".
        custom_prompt_content: Prompt body used when mode is "custom".
        baml_options: Per-call options merged over this client's defaults.

    Returns:
        The serialized HTTP request (streaming mode).
    """
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraph",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="stream",
    )
    return result
def ExtractContentGraphGeneric(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build the streaming-mode HTTP request for the ExtractContentGraphGeneric BAML function."""
    merged = self.__options.merge_options(baml_options)
    return merged.create_http_request_sync(
        function_name="ExtractContentGraphGeneric",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="stream",
    )
def ExtractContentGraphWithAnthropic(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build the streaming-mode HTTP request for the ExtractContentGraphWithAnthropic BAML function."""
    merged = self.__options.merge_options(baml_options)
    return merged.create_http_request_sync(
        function_name="ExtractContentGraphWithAnthropic",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="stream",
    )
def ExtractContentGraphWithEnvPrompt(
    self,
    content: str,
    prompt_override: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build the streaming-mode HTTP request for the ExtractContentGraphWithEnvPrompt BAML function."""
    merged = self.__options.merge_options(baml_options)
    return merged.create_http_request_sync(
        function_name="ExtractContentGraphWithEnvPrompt",
        args={
            "content": content,
            "prompt_override": prompt_override,
        },
        mode="stream",
    )
def SummarizeCode(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build the streaming-mode HTTP request for the SummarizeCode BAML function."""
    merged = self.__options.merge_options(baml_options)
    return merged.create_http_request_sync(
        function_name="SummarizeCode",
        args={
            "content": content,
        },
        mode="stream",
    )
def SummarizeContent(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build the streaming-mode HTTP request for the SummarizeContent BAML function."""
    merged = self.__options.merge_options(baml_options)
    return merged.create_http_request_sync(
        function_name="SummarizeContent",
        args={
            "content": content,
        },
        mode="stream",
    )
b = BamlSyncClient(DoNotUseDirectlyCallManager({}))
def ExtractContentGraphGeneric(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the streaming-mode HTTP request for ExtractContentGraphGeneric."""
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraphGeneric",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="stream",
    )
    return result
def ExtractContentGraphWithAnthropic(
    self,
    content: str,
    mode: typing.Optional[
        typing.Union[
            typing_extensions.Literal["simple"],
            typing_extensions.Literal["base"],
            typing_extensions.Literal["guided"],
            typing_extensions.Literal["strict"],
            typing_extensions.Literal["custom"],
        ]
    ] = None,
    custom_prompt_content: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the streaming-mode HTTP request for ExtractContentGraphWithAnthropic."""
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraphWithAnthropic",
        args={
            "content": content,
            "mode": mode,
            "custom_prompt_content": custom_prompt_content,
        },
        mode="stream",
    )
    return result
def ExtractContentGraphWithEnvPrompt(
    self,
    content: str,
    prompt_override: typing.Optional[str] = None,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the streaming-mode HTTP request for ExtractContentGraphWithEnvPrompt."""
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="ExtractContentGraphWithEnvPrompt",
        args={
            "content": content,
            "prompt_override": prompt_override,
        },
        mode="stream",
    )
    return result
def SummarizeCode(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the streaming-mode HTTP request for SummarizeCode."""
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="SummarizeCode",
        args={
            "content": content,
        },
        mode="stream",
    )
    return result
def SummarizeContent(
    self,
    content: str,
    baml_options: BamlCallOptions = {},
) -> baml_py.baml_py.HTTPRequest:
    """Build, but do not send, the streaming-mode HTTP request for SummarizeContent."""
    result = self.__options.merge_options(baml_options).create_http_request_sync(
        function_name="SummarizeContent",
        args={
            "content": content,
        },
        mode="stream",
    )
    return result
b = BamlSyncClient(DoNotUseDirectlyCallManager({}))

View file

@ -14,9 +14,13 @@ from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX
trace = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.trace_fn
set_tags = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.upsert_tags
def flush():
    """Flush any buffered BAML trace/log events via the global context."""
    # Diff-merge residue had this call duplicated; flushing once is the intended behavior.
    DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.flush()
on_log_event = DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_CTX.on_log_event
# Public API of this module; the duplicated legacy (single-quoted) assignment was diff residue.
__all__ = ["trace", "set_tags", "flush", "on_log_event"]

View file

@ -15,19 +15,29 @@ from baml_py import type_builder
from baml_py import baml_py
from .globals import DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME
class TypeBuilder(type_builder.TypeBuilder):
def __init__(self):
super().__init__(classes=set(
["Edge","KnowledgeGraph","Node","SummarizedClass","SummarizedCode","SummarizedContent","SummarizedFunction",]
), enums=set(
[]
), runtime=DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME)
super().__init__(
classes=set(
[
"Edge",
"KnowledgeGraph",
"Node",
"SummarizedClass",
"SummarizedCode",
"SummarizedContent",
"SummarizedFunction",
]
),
enums=set([]),
runtime=DO_NOT_USE_DIRECTLY_UNLESS_YOU_KNOW_WHAT_YOURE_DOING_RUNTIME,
)
# #########################################################################
# Generated enums 0
# #########################################################################
# #########################################################################
# Generated classes 7
# #########################################################################
@ -61,7 +71,6 @@ class TypeBuilder(type_builder.TypeBuilder):
return SummarizedFunctionViewer(self)
# #########################################################################
# Generated enums 0
# #########################################################################
@ -71,11 +80,18 @@ class TypeBuilder(type_builder.TypeBuilder):
# Generated classes 7
# #########################################################################
class EdgeAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("Edge")
self._properties: typing.Set[str] = set([ "source_node_id", "target_node_id", "relationship_name", ])
self._properties: typing.Set[str] = set(
[
"source_node_id",
"target_node_id",
"relationship_name",
]
)
self._props = EdgeProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -90,39 +106,41 @@ class EdgeViewer(EdgeAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties]
return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class EdgeProperties:
    """Typed viewer accessors for the properties of the generated Edge class."""

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    @property
    def source_node_id(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("source_node_id"))

    @property
    def target_node_id(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("target_node_id"))

    @property
    def relationship_name(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("relationship_name"))
class KnowledgeGraphAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("KnowledgeGraph")
self._properties: typing.Set[str] = set([ "nodes", "edges", ])
self._properties: typing.Set[str] = set(
[
"nodes",
"edges",
]
)
self._props = KnowledgeGraphProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -137,35 +155,39 @@ class KnowledgeGraphViewer(KnowledgeGraphAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties]
return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class KnowledgeGraphProperties:
    """Typed viewer accessors for the properties of the generated KnowledgeGraph class."""

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    @property
    def nodes(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("nodes"))

    @property
    def edges(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("edges"))
class NodeAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("Node")
self._properties: typing.Set[str] = set([ "id", "name", "type", "description", ])
self._properties: typing.Set[str] = set(
[
"id",
"name",
"type",
"description",
]
)
self._props = NodeProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -180,7 +202,6 @@ class NodeBuilder(NodeAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def add_property(self, name: str, type: baml_py.FieldType) -> baml_py.ClassPropertyBuilder:
if name in self._properties:
raise ValueError(f"Property {name} already exists.")
@ -189,45 +210,46 @@ class NodeBuilder(NodeAst):
def list_properties(self) -> typing.List[typing.Tuple[str, baml_py.ClassPropertyBuilder]]:
return [(name, self._bldr.property(name)) for name in self._properties]
class NodeProperties:
    """Builder accessors for the properties of the generated Node class.

    Unlike the viewer classes, this returns mutable ClassPropertyBuilder objects and
    supports dynamically added properties via __getattr__.
    """

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    def __getattr__(self, name: str) -> baml_py.ClassPropertyBuilder:
        # Only invoked for names not found normally; resolves known property names.
        if name not in self.__properties:
            raise AttributeError(f"Property {name} not found.")
        return self.__bldr.property(name)

    @property
    def id(self) -> baml_py.ClassPropertyBuilder:
        return self.__bldr.property("id")

    @property
    def name(self) -> baml_py.ClassPropertyBuilder:
        return self.__bldr.property("name")

    @property
    def type(self) -> baml_py.ClassPropertyBuilder:
        return self.__bldr.property("type")

    @property
    def description(self) -> baml_py.ClassPropertyBuilder:
        return self.__bldr.property("description")
class SummarizedClassAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedClass")
self._properties: typing.Set[str] = set([ "name", "description", "methods", "decorators", ])
self._properties: typing.Set[str] = set(
[
"name",
"description",
"methods",
"decorators",
]
)
self._props = SummarizedClassProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -242,43 +264,50 @@ class SummarizedClassViewer(SummarizedClassAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties]
return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedClassProperties:
    """Typed viewer accessors for the properties of the generated SummarizedClass class."""

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    @property
    def name(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("name"))

    @property
    def description(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("description"))

    @property
    def methods(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("methods"))

    @property
    def decorators(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("decorators"))
class SummarizedCodeAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedCode")
self._properties: typing.Set[str] = set([ "high_level_summary", "key_features", "imports", "constants", "classes", "functions", "workflow_description", ])
self._properties: typing.Set[str] = set(
[
"high_level_summary",
"key_features",
"imports",
"constants",
"classes",
"functions",
"workflow_description",
]
)
self._props = SummarizedCodeProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -293,55 +322,57 @@ class SummarizedCodeViewer(SummarizedCodeAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties]
return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedCodeProperties:
    """Typed viewer accessors for the properties of the generated SummarizedCode class."""

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    @property
    def high_level_summary(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("high_level_summary"))

    @property
    def key_features(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("key_features"))

    @property
    def imports(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("imports"))

    @property
    def constants(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("constants"))

    @property
    def classes(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("classes"))

    @property
    def functions(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("functions"))

    @property
    def workflow_description(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("workflow_description"))
class SummarizedContentAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedContent")
self._properties: typing.Set[str] = set([ "summary", "description", ])
self._properties: typing.Set[str] = set(
[
"summary",
"description",
]
)
self._props = SummarizedContentProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -356,35 +387,40 @@ class SummarizedContentViewer(SummarizedContentAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties]
return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedContentProperties:
    """Typed viewer accessors for the properties of the generated SummarizedContent class."""

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    @property
    def summary(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("summary"))

    @property
    def description(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("description"))
class SummarizedFunctionAst:
def __init__(self, tb: type_builder.TypeBuilder):
_tb = tb._tb # type: ignore (we know how to use this private attribute)
_tb = tb._tb # type: ignore (we know how to use this private attribute)
self._bldr = _tb.class_("SummarizedFunction")
self._properties: typing.Set[str] = set([ "name", "description", "inputs", "outputs", "decorators", ])
self._properties: typing.Set[str] = set(
[
"name",
"description",
"inputs",
"outputs",
"decorators",
]
)
self._props = SummarizedFunctionProperties(self._bldr, self._properties)
def type(self) -> baml_py.FieldType:
@ -399,38 +435,34 @@ class SummarizedFunctionViewer(SummarizedFunctionAst):
def __init__(self, tb: type_builder.TypeBuilder):
super().__init__(tb)
def list_properties(self) -> typing.List[typing.Tuple[str, type_builder.ClassPropertyViewer]]:
return [(name, type_builder.ClassPropertyViewer(self._bldr.property(name))) for name in self._properties]
return [
(name, type_builder.ClassPropertyViewer(self._bldr.property(name)))
for name in self._properties
]
class SummarizedFunctionProperties:
    """Typed viewer accessors for the properties of the generated SummarizedFunction class."""

    def __init__(self, bldr: baml_py.ClassBuilder, properties: typing.Set[str]):
        self.__bldr = bldr
        # Diff residue duplicated this assignment; one is sufficient.
        self.__properties = properties  # type: ignore (we know how to use this private attribute) # noqa: F821

    @property
    def name(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("name"))

    @property
    def description(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("description"))

    @property
    def inputs(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("inputs"))

    @property
    def outputs(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("outputs"))

    @property
    def decorators(self) -> type_builder.ClassPropertyViewer:
        return type_builder.ClassPropertyViewer(self.__bldr.property("decorators"))

View file

@ -15,27 +15,18 @@ from . import stream_types
# Maps fully-qualified type names to their concrete (final) and streaming (partial) classes.
# The stray second closing brace left by the diff merge (a SyntaxError) is removed.
type_map = {
    "types.Edge": types.Edge,
    "stream_types.Edge": stream_types.Edge,
    "types.KnowledgeGraph": types.KnowledgeGraph,
    "stream_types.KnowledgeGraph": stream_types.KnowledgeGraph,
    "types.Node": types.Node,
    "stream_types.Node": stream_types.Node,
    "types.SummarizedClass": types.SummarizedClass,
    "stream_types.SummarizedClass": stream_types.SummarizedClass,
    "types.SummarizedCode": types.SummarizedCode,
    "stream_types.SummarizedCode": stream_types.SummarizedCode,
    "types.SummarizedContent": types.SummarizedContent,
    "stream_types.SummarizedContent": stream_types.SummarizedContent,
    "types.SummarizedFunction": types.SummarizedFunction,
    "stream_types.SummarizedFunction": stream_types.SummarizedFunction,
}

View file

@ -20,22 +20,29 @@ from pydantic import BaseModel, ConfigDict
import baml_py
# Type variables for Checked[...] results; the diff merge left each defined twice
# (old single-quoted + new double-quoted), needlessly rebinding them. Define once.
CheckT = typing_extensions.TypeVar("CheckT")
CheckName = typing_extensions.TypeVar("CheckName", bound=str)
class Check(BaseModel):
    """Result of evaluating one named check: its name, source expression, and status.

    status is compared against "succeeded" by all_succeeded() below.
    """

    name: str
    expression: str
    status: str
class Checked(BaseModel, typing.Generic[CheckT, CheckName]):
    """A value of type CheckT paired with the named checks evaluated against it."""

    value: CheckT
    checks: typing.Dict[CheckName, Check]
def get_checks(checks: typing.Dict[CheckName, Check]) -> typing.List[Check]:
    """Return the Check objects from *checks* as a list (insertion order preserved)."""
    return [*checks.values()]
def all_succeeded(checks: typing.Dict[CheckName, Check]) -> bool:
    """Return True iff every check in *checks* has status "succeeded"."""
    for check in get_checks(checks):
        if check.status != "succeeded":
            return False
    return True
# #########################################################################
# Generated enums (0)
# #########################################################################
@ -44,6 +51,7 @@ def all_succeeded(checks: typing.Dict[CheckName, Check]) -> bool:
# Generated classes (7)
# #########################################################################
class Edge(BaseModel):
# doc string for edge
# doc string for source_node_id
@ -52,23 +60,27 @@ class Edge(BaseModel):
target_node_id: str
relationship_name: str
class KnowledgeGraph(BaseModel):
    """A graph extracted from content: its nodes and the edges connecting them."""

    nodes: typing.List["Node"]
    edges: typing.List["Edge"]
class Node(BaseModel):
    """A knowledge-graph node.

    extra="allow" permits additional, schema-less fields beyond those declared below.
    The diff merge left model_config assigned twice; one assignment is kept.
    """

    model_config = ConfigDict(extra="allow")

    id: str
    name: str
    type: str
    description: str
class SummarizedClass(BaseModel):
    """Summary of a single class found in source code."""

    name: str
    description: str
    methods: typing.Optional[typing.List["SummarizedFunction"]] = None
    decorators: typing.Optional[typing.List[str]] = None
class SummarizedCode(BaseModel):
high_level_summary: str
key_features: typing.List[str]
@ -78,10 +90,12 @@ class SummarizedCode(BaseModel):
functions: typing.List["SummarizedFunction"]
workflow_description: typing.Optional[str] = None
class SummarizedContent(BaseModel):
    """Summarization result for general content: a summary and a description."""

    summary: str
    description: str
class SummarizedFunction(BaseModel):
name: str
description: str
@ -89,6 +103,7 @@ class SummarizedFunction(BaseModel):
outputs: typing.Optional[typing.List[str]] = None
decorators: typing.Optional[typing.List[str]] = None
# #########################################################################
# Generated type aliases (0)
# #########################################################################

View file

@ -5,6 +5,7 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import model_validator
from baml_py import ClientRegistry
class LLMConfig(BaseSettings):
"""
Configuration settings for the LLM (Large Language Model) provider and related options.
@ -54,11 +55,15 @@ class LLMConfig(BaseSettings):
def model_post_init(self, __context) -> None:
"""Initialize the BAML registry after the model is created."""
self.baml_registry.add_llm_client(name=self.llm_provider, provider=self.llm_provider, options={
"model": self.llm_model,
"temperature": self.llm_temperature,
"api_key": self.llm_api_key
})
self.baml_registry.add_llm_client(
name=self.llm_provider,
provider=self.llm_provider,
options={
"model": self.llm_model,
"temperature": self.llm_temperature,
"api_key": self.llm_api_key,
},
)
# Sets the primary client
self.baml_registry.set_primary(self.llm_provider)

View file

@ -14,22 +14,22 @@ logger = get_logger("extract_summary_baml")
async def extract_summary(content: str, response_model: Type[BaseModel]):
"""
Extract summary using BAML framework.
Args:
content: The content to summarize
response_model: The Pydantic model type for the response
Returns:
BaseModel: The summarized content in the specified format
"""
config = get_llm_config()
# Use BAML's SummarizeContent function
summary_result = await b.SummarizeContent(content, baml_options={"tb": config.baml_registry})
# Convert BAML result to the expected response model
if response_model is SummarizedCode:
# If it's asking for SummarizedCode but we got SummarizedContent,
# If it's asking for SummarizedCode but we got SummarizedContent,
# we need to use SummarizeCode instead
code_result = await b.SummarizeCode(content, baml_options={"tb": config.baml_registry})
return code_result
@ -41,10 +41,10 @@ async def extract_summary(content: str, response_model: Type[BaseModel]):
async def extract_code_summary(content: str):
"""
Extract code summary using BAML framework with mocking support.
Args:
content: The code content to summarize
Returns:
SummarizedCode: The summarized code information
"""
@ -61,7 +61,9 @@ async def extract_code_summary(content: str):
config = get_llm_config()
result = await b.SummarizeCode(content, baml_options={"tb": config.baml_registry})
except Exception as e:
logger.error("Failed to extract code summary with BAML, falling back to mock summary", exc_info=e)
logger.error(
"Failed to extract code summary with BAML, falling back to mock summary", exc_info=e
)
result = get_mock_summarized_code()
return result

View file

@ -2,14 +2,14 @@ import os
from typing import Type
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.async_client import b
from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.type_builder import TypeBuilder
from cognee.infrastructure.llm.structured_output_framework.baml.baml_client.type_builder import (
TypeBuilder,
)
from cognee.infrastructure.llm.structured_output_framework.baml_src.config import get_llm_config
from cognee.shared.logging_utils import get_logger, setup_logging
async def extract_content_graph(content: str, response_model: Type[BaseModel]):
# tb = TypeBuilder()
config = get_llm_config()
@ -19,9 +19,8 @@ async def extract_content_graph(content: str, response_model: Type[BaseModel]):
# ([tb.literal_string("USA"), tb.literal_string("UK"), tb.literal_string("Germany"), tb.literal_string("other")])
# tb.Node.add_property("country", country)
graph = await b.ExtractContentGraph(content, mode="simple", baml_options={ "tb": config.baml_registry})
graph = await b.ExtractContentGraph(
content, mode="simple", baml_options={"tb": config.baml_registry}
)
return graph

View file

@ -1,7 +1,11 @@
from typing import Type
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
async def extract_categories(content: str, response_model: Type[BaseModel]):

View file

@ -5,8 +5,12 @@ from typing import Type
from instructor.exceptions import InstructorRetryException
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.shared.data_models import SummarizedCode
from cognee.tasks.summarization.mock_summary import get_mock_summarized_code

View file

@ -1,9 +1,15 @@
import os
from typing import Type
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
async def extract_content_graph(content: str, response_model: Type[BaseModel]):

View file

@ -3,9 +3,16 @@ from pydantic import BaseModel
import instructor
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import rate_limit_async, sleep_and_retry_async
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
sleep_and_retry_async,
)
class AnthropicAdapter(LLMInterface):

View file

@ -6,7 +6,9 @@ import time
import asyncio
import random
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
logger = get_logger()

View file

@ -6,8 +6,12 @@ from litellm import acompletion, JSONSchemaValidationError
from cognee.shared.logging_utils import get_logger
from cognee.modules.observability.get_observe import get_observe
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
sleep_and_retry_async,

View file

@ -4,8 +4,13 @@ from typing import Type
from pydantic import BaseModel
import instructor
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import rate_limit_async, sleep_and_retry_async
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
sleep_and_retry_async,
)
import litellm

View file

@ -4,7 +4,9 @@ from enum import Enum
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.ollama.adapter import OllamaAPIAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.ollama.adapter import (
OllamaAPIAdapter,
)
# Define an Enum for LLM Providers
@ -59,7 +61,9 @@ def get_llm_client():
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.openai.adapter import OpenAIAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.openai.adapter import (
OpenAIAdapter,
)
return OpenAIAdapter(
api_key=llm_config.llm_api_key,
@ -75,7 +79,9 @@ def get_llm_client():
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import GenericAPIAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import (
GenericAPIAdapter,
)
return OllamaAPIAdapter(
llm_config.llm_endpoint,
@ -86,7 +92,9 @@ def get_llm_client():
)
elif provider == LLMProvider.ANTHROPIC:
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.anthropic import AnthropicAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.anthropic import (
AnthropicAdapter,
)
return AnthropicAdapter(max_tokens=max_tokens, model=llm_config.llm_model)
@ -94,7 +102,9 @@ def get_llm_client():
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import GenericAPIAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.generic_llm_api import (
GenericAPIAdapter,
)
return GenericAPIAdapter(
llm_config.llm_endpoint,
@ -108,7 +118,9 @@ def get_llm_client():
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.gemini import GeminiAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.gemini import (
GeminiAdapter,
)
return GeminiAdapter(
api_key=llm_config.llm_api_key,

View file

@ -3,7 +3,9 @@
from typing import Type, Protocol
from abc import abstractmethod
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
class LLMInterface(Protocol):

View file

@ -1,7 +1,9 @@
from typing import Type
from pydantic import BaseModel
import instructor
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
rate_limit_sync,

View file

@ -7,8 +7,12 @@ from pydantic import BaseModel
from cognee.modules.data.processing.document_types.open_data_file import open_data_file
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
rate_limit_async,
rate_limit_sync,

View file

@ -49,7 +49,9 @@ from functools import wraps
from limits import RateLimitItemPerMinute, storage
from limits.strategies import MovingWindowRateLimiter
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
logger = get_logger()

View file

@ -24,7 +24,9 @@ class GeminiTokenizer(TokenizerInterface):
# Get LLM API key from config
from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
config = get_embedding_config()
llm_config = get_llm_config()

View file

@ -1,6 +1,8 @@
import litellm
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.shared.logging_utils import get_logger
logger = get_logger()

View file

@ -1,4 +1,6 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.chunking.Chunker import Chunker
from .Document import Document

View file

@ -1,4 +1,6 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.chunking.Chunker import Chunker
from .Document import Document

View file

@ -69,7 +69,10 @@ async def cognee_pipeline(
cognee_pipeline.first_run = True
if cognee_pipeline.first_run:
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import test_llm_connection, test_embedding_connection
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.utils import (
test_llm_connection,
test_embedding_connection,
)
# Test LLM and Embedding configuration once before running Cognee
await test_llm_connection()

View file

@ -6,8 +6,12 @@ from pydantic import BaseModel
from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
class CodeRetriever(BaseRetriever):

View file

@ -1,9 +1,14 @@
from typing import Any, Optional, List, Type
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever
from cognee.modules.retrieval.utils.completion import generate_completion
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
logger = get_logger()

View file

@ -2,8 +2,12 @@ from typing import Any, Optional
import logging
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.graph.networkx.adapter import NetworkXAdapter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
)
from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.modules.retrieval.exceptions import SearchTypeNotSupported
from cognee.infrastructure.databases.graph.graph_db_interface import GraphDBInterface

View file

@ -1,5 +1,10 @@
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
async def generate_completion(

View file

@ -9,7 +9,9 @@ from cognee.modules.users.methods import get_default_user
from cognee.modules.users.models import User
from cognee.shared.utils import send_telemetry
from cognee.modules.search.methods import search
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
logger = get_logger(level=ERROR)

View file

@ -4,7 +4,9 @@ from enum import Enum, auto
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel, Field
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
if get_llm_config().llm_provider.lower() == "gemini":
"""

View file

@ -7,7 +7,9 @@ from pydantic import BaseModel
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.engine.models import DataPoint
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction.extract_categories import extract_categories
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction.extract_categories import (
extract_categories,
)
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk

View file

@ -6,8 +6,13 @@ from pydantic import BaseModel
from cognee.infrastructure.entities.BaseEntityExtractor import BaseEntityExtractor
from cognee.modules.engine.models import Entity
from cognee.modules.engine.models.EntityType import EntityType
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
logger = get_logger("llm_entity_extractor")

View file

@ -1,8 +1,13 @@
from typing import List, Tuple
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt, read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
read_query_prompt,
)
from cognee.root_dir import get_absolute_path

View file

@ -1,6 +1,11 @@
from typing import List
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt, read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
read_query_prompt,
)
from cognee.shared.data_models import KnowledgeGraph
from cognee.root_dir import get_absolute_path

View file

@ -1,8 +1,13 @@
from typing import List
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import render_prompt, read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
render_prompt,
read_query_prompt,
)
from cognee.root_dir import get_absolute_path

View file

@ -1,19 +1,26 @@
import asyncio
from typing import Type, List
from pydantic import BaseModel
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_content_graph
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_content_graph,
)
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.tasks.storage import add_data_points
from cognee.base_config import get_base_config
base = get_base_config()
if base.structured_output_framework == 'BAML':
if base.structured_output_framework == "BAML":
print(f"Using BAML framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_content_graph
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_content_graph,
)
else:
print(f"Using llitellm_instructor framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_content_graph
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_content_graph,
)
async def extract_graph_from_code(
data_chunks: list[DocumentChunk], graph_model: Type[BaseModel]

View file

@ -7,13 +7,18 @@ from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.modules.ontology.rdf_xml.OntologyResolver import OntologyResolver
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.base_config import get_base_config
base = get_base_config()
if base.structured_output_framework == 'BAML':
if base.structured_output_framework == "BAML":
print(f"Using BAML framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_content_graph
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_content_graph,
)
else:
print(f"Using llitellm_instructor framework: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_content_graph
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_content_graph,
)
from cognee.modules.graph.utils import (
expand_with_nodes_and_edges,

View file

@ -15,8 +15,12 @@ from pydantic import BaseModel
from cognee.modules.graph.exceptions import EntityNotFoundError
from cognee.modules.ingestion.exceptions import IngestionError
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.infrastructure.data.chunking.get_chunking_engine import get_chunk_engine
from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine

View file

@ -6,12 +6,18 @@ from cognee.infrastructure.engine import DataPoint
from cognee.base_config import get_base_config
base = get_base_config()
if base.structured_output_framework == 'BAML':
if base.structured_output_framework == "BAML":
print(f"Using BAML framework for code summarization: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_code_summary
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_code_summary,
)
else:
print(f"Using llitellm_instructor framework for code summarization: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_code_summary
print(
f"Using llitellm_instructor framework for code summarization: {base.structured_output_framework}"
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_code_summary,
)
from .models import CodeSummary

View file

@ -5,12 +5,18 @@ from pydantic import BaseModel
from cognee.base_config import get_base_config
base = get_base_config()
if base.structured_output_framework == 'BAML':
if base.structured_output_framework == "BAML":
print(f"Using BAML framework for text summarization: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import extract_summary
from cognee.infrastructure.llm.structured_output_framework.baml_src.extraction import (
extract_summary,
)
else:
print(f"Using llitellm_instructor framework for text summarization: {base.structured_output_framework}")
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import extract_summary
print(
f"Using llitellm_instructor framework for text summarization: {base.structured_output_framework}"
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.extraction import (
extract_summary,
)
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.modules.cognify.config import get_cognify_config

View file

@ -3,8 +3,12 @@ import time
import asyncio
import logging
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import EmbeddingRateLimiter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.embedding_rate_limiter import (
EmbeddingRateLimiter,
)
from cognee.tests.unit.infrastructure.mock_embedding_engine import MockEmbeddingEngine
# Configure logging

View file

@ -2,8 +2,12 @@ import asyncio
import os
from unittest.mock import patch
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import llm_rate_limiter
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.rate_limiter import (
llm_rate_limiter,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.config import (
get_llm_config,
)
async def test_rate_limiting_realistic():

View file

@ -3,7 +3,9 @@ import logging
import cognee
import asyncio
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from dotenv import load_dotenv
from cognee.api.v1.search import SearchType
from cognee.modules.engine.models import NodeSet

View file

@ -12,8 +12,13 @@ from cognee.tasks.temporal_awareness.index_graphiti_objects import (
)
from cognee.modules.retrieval.utils.brute_force_triplet_search import brute_force_triplet_search
from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import read_query_prompt, render_prompt
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.prompts import (
read_query_prompt,
render_prompt,
)
from cognee.infrastructure.llm.structured_output_framework.llitellm_instructor.llm.get_llm_client import (
get_llm_client,
)
from cognee.modules.users.methods import get_default_user
text_list = [