OIDC / OpenSearch auth integration -- search endpoint

phact 2025-08-11 16:45:54 -04:00
parent 925ecc17a1
commit 8def00d6d6
26 changed files with 497 additions and 77 deletions

.gitignore vendored
View file

@@ -10,6 +10,9 @@ wheels/
.venv
.env
# RSA keys for JWT signing
keys/
.idea/
1001*.pdf

View file

@@ -38,3 +38,20 @@ RUN echo y | opensearch-plugin install https://repo1.maven.org/maven2/org/opense
RUN echo y | opensearch-plugin install repository-gcs
RUN echo y | opensearch-plugin install repository-azure
RUN echo y | opensearch-plugin install repository-s3
# Copy OIDC and DLS security configuration
COPY securityconfig/ /usr/share/opensearch/securityconfig/
# Create a script to apply security configuration after OpenSearch starts
RUN echo '#!/bin/bash' > /usr/share/opensearch/setup-security.sh && \
echo 'echo "Waiting for OpenSearch to start..."' >> /usr/share/opensearch/setup-security.sh && \
echo 'until curl -s -k -u admin:${OPENSEARCH_INITIAL_ADMIN_PASSWORD} https://localhost:9200; do sleep 1; done' >> /usr/share/opensearch/setup-security.sh && \
echo 'echo "Applying OIDC and DLS security configuration..."' >> /usr/share/opensearch/setup-security.sh && \
echo '/usr/share/opensearch/plugins/opensearch-security/tools/securityadmin.sh \' >> /usr/share/opensearch/setup-security.sh && \
echo ' -cd /usr/share/opensearch/securityconfig \' >> /usr/share/opensearch/setup-security.sh && \
echo ' -icl -nhnv \' >> /usr/share/opensearch/setup-security.sh && \
echo ' -cacert /usr/share/opensearch/config/root-ca.pem \' >> /usr/share/opensearch/setup-security.sh && \
echo ' -cert /usr/share/opensearch/config/kirk.pem \' >> /usr/share/opensearch/setup-security.sh && \
echo ' -key /usr/share/opensearch/config/kirk-key.pem' >> /usr/share/opensearch/setup-security.sh && \
echo 'echo "Security configuration applied successfully"' >> /usr/share/opensearch/setup-security.sh && \
chmod +x /usr/share/opensearch/setup-security.sh

View file

@@ -1,7 +1,7 @@
FROM python:3.13-slim
# Install curl for uv installation
RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*
# Install curl for uv installation and openssl for RSA key generation
RUN apt-get update && apt-get install -y curl openssl && rm -rf /var/lib/apt/lists/*
# Install uv
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
@@ -22,6 +22,14 @@ RUN uv run python warm_up_docling.py && rm warm_up_docling.py 2506.08231v1.pdf
# Copy Python source
COPY src/ ./src/
# Generate RSA keys for JWT signing if they don't exist
RUN mkdir -p keys && \
if [ ! -f keys/private_key.pem ]; then \
openssl genrsa -out keys/private_key.pem 2048 && \
openssl rsa -in keys/private_key.pem -pubout -out keys/public_key.pem && \
echo "Generated RSA keys for JWT signing"; \
fi
# Expose backend port
EXPOSE 8000
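
The same key pair can also be produced outside the container with the cryptography package this commit adds as a dependency; a minimal sketch, assuming the keys/ layout used by the Dockerfile and SessionManager's default paths:

# Sketch: generate the RSA key pair the Dockerfile creates with openssl,
# using the cryptography package. Paths are assumptions matching
# SessionManager's defaults (keys/private_key.pem, keys/public_key.pem).
from pathlib import Path
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

keys = Path("keys")
keys.mkdir(exist_ok=True)
if not (keys / "private_key.pem").exists():
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    (keys / "private_key.pem").write_bytes(private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,  # what `openssl genrsa` emits
        encryption_algorithm=serialization.NoEncryption(),
    ))
    (keys / "public_key.pem").write_bytes(private_key.public_key().public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,  # what `openssl rsa -pubout` emits
    ))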

View file

@@ -4,9 +4,23 @@ services:
context: .
dockerfile: Dockerfile
container_name: os
depends_on:
- gendb-backend
environment:
- discovery.type=single-node
- OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_PASSWORD}
# Run security setup in background after OpenSearch starts
command: >
bash -c "
# Start OpenSearch in background
/usr/share/opensearch/opensearch-docker-entrypoint.sh opensearch &
# Wait a bit for OpenSearch to start, then apply security config
sleep 10 && /usr/share/opensearch/setup-security.sh &
# Wait for background processes
wait
"
ports:
- "9200:9200"
- "9600:9600"
@@ -30,7 +44,6 @@ services:
#dockerfile: Dockerfile.backend
container_name: gendb-backend
depends_on:
- opensearch
- langflow
environment:
- OPENSEARCH_HOST=opensearch
@@ -50,6 +63,7 @@ services:
- ./pyproject.toml:/app/pyproject.toml
- ./uv.lock:/app/uv.lock
- ./documents:/app/documents
- ./keys:/app/keys
gpus: all
platform: linux/amd64
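
The fixed `sleep 10` before running the security setup is a race; a readiness poll is more robust. A sketch of the same wait loop in Python, mirroring the backend's wait_for_opensearch (URL and credentials below are placeholders):

# Sketch: poll OpenSearch until it answers before applying security config
# or creating indices; verify=False because the cluster uses self-signed certs.
import asyncio
import time
import httpx

async def wait_for_opensearch(url: str = "https://localhost:9200",
                              auth=("admin", "<OPENSEARCH_PASSWORD>"),
                              timeout: float = 120.0) -> None:
    deadline = time.monotonic() + timeout
    async with httpx.AsyncClient(verify=False) as client:
        while time.monotonic() < deadline:
            try:
                resp = await client.get(url, auth=auth)
                if resp.status_code < 500:
                    return  # cluster is answering
            except httpx.TransportError:
                pass  # not up yet
            await asyncio.sleep(1)
    raise TimeoutError("OpenSearch failed to become ready")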

View file

@@ -5,8 +5,10 @@ description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
# "agentd>=0.2.2",
"agentd>=0.2.1",
"aiofiles>=24.1.0",
"cryptography>=45.0.6",
"docling>=2.41.0",
"google-api-python-client>=2.143.0",
"google-auth-httplib2>=0.2.0",
@@ -21,6 +23,7 @@ dependencies = [
]
[tool.uv.sources]
#agentd = { path = "/home/tato/Desktop/agentd" }
torch = [
{ index = "pytorch-cu128" },
]

View file

@@ -0,0 +1,5 @@
_meta:
type: "actiongroups"
config_version: 2
# Empty - using built-in OpenSearch action groups only

securityconfig/config.yml Normal file
View file

@@ -0,0 +1,31 @@
_meta:
type: "config"
config_version: 2
config:
dynamic:
authc:
openid_auth_domain:
order: 0
http_enabled: true
http_authenticator:
type: openid
challenge: false
config:
openid_connect_url: "http://gendb-backend:8000/.well-known/openid-configuration"
subject_key: "sub"
jwt_header: "Authorization" # expects Bearer token
roles_key: "roles"
authentication_backend:
type: noop
basic_internal_auth_domain:
order: 1
http_enabled: true
http_authenticator:
type: basic
challenge: true
authentication_backend:
type: intern
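
The two domains are tried in `order`: a request carrying a gendb JWT authenticates through openid_auth_domain, while basic credentials fall through to the internal users database. A rough sketch of exercising both paths (host, token, and password are placeholders):

# Sketch: both auth paths against the security plugin's authinfo endpoint,
# which echoes back the authenticated user and backend roles.
import httpx

def authinfo(headers=None, auth=None):
    return httpx.get("https://localhost:9200/_plugins/_security/authinfo",
                     headers=headers, auth=auth, verify=False).json()

# Order 0: OIDC -- a gendb-issued JWT in the Authorization header.
print(authinfo(headers={"Authorization": "Bearer <jwt from auth_token cookie>"}))
# Order 1: basic auth fallback against internal_users.yml.
print(authinfo(auth=("admin", "<OPENSEARCH_INITIAL_ADMIN_PASSWORD>")))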

View file

@@ -0,0 +1,7 @@
_meta:
type: "internalusers"
config_version: 2
admin:
reserved: true
hash: "$2y$12$YA8.WOaVAWKEZqEbP48bIe4R70/8zo.ozDAlwjKxczVBumZ.Ol5fS"

View file

@@ -0,0 +1,5 @@
_meta:
type: "nodesdn"
config_version: 2
# Empty nodes DN - using default node certificate validation

securityconfig/roles.yml Normal file
View file

@@ -0,0 +1,19 @@
_meta:
type: "roles"
config_version: 2
gendb_user_role:
description: "DLS: user can read/write docs they own or are allowed on"
cluster_permissions:
- "indices:data/write/bulk"
index_permissions:
- index_patterns: ["documents", "documents*"]
allowed_actions:
- crud
- create_index
dls: >
{"bool":{"should":[
{"term":{"owner":"${user.name}"}},
{"term":{"allowed_users":"${user.name}"}},
{"bool":{"must_not":{"exists":{"field":"owner"}}}}
],"minimum_should_match":1}}

View file

@@ -0,0 +1,13 @@
_meta:
type: "rolesmapping"
config_version: 2
gendb_user_role:
users: []
hosts: []
backend_roles:
- "gendb_user"
all_access:
users:
- "admin"

View file

@@ -0,0 +1,5 @@
_meta:
type: "tenants"
config_version: 2
# Empty tenants - using global tenant only

View file

@@ -32,7 +32,7 @@ async def auth_callback(request: Request, auth_service, session_manager):
state = data.get("state")
result = await auth_service.handle_oauth_callback(
connection_id, authorization_code, state
connection_id, authorization_code, state, request
)
# If this is app auth, set JWT cookie

src/api/oidc.py Normal file
View file

@@ -0,0 +1,102 @@
from starlette.requests import Request
from starlette.responses import JSONResponse
import json
import base64
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
async def oidc_discovery(request: Request, session_manager):
"""OIDC discovery endpoint"""
base_url = str(request.base_url).rstrip('/')
discovery_config = {
"issuer": base_url,
"authorization_endpoint": f"{base_url}/auth/init",
"token_endpoint": f"{base_url}/auth/callback",
"jwks_uri": f"{base_url}/auth/jwks",
"userinfo_endpoint": f"{base_url}/auth/me",
"response_types_supported": ["code"],
"subject_types_supported": ["public"],
"id_token_signing_alg_values_supported": ["RS256"],
"scopes_supported": ["openid", "email", "profile"],
"token_endpoint_auth_methods_supported": ["client_secret_basic"],
"claims_supported": [
"sub", "iss", "aud", "exp", "iat", "auth_time",
"email", "email_verified", "name", "preferred_username"
]
}
return JSONResponse(discovery_config)
async def jwks_endpoint(request: Request, session_manager):
"""JSON Web Key Set endpoint"""
try:
# Get the public key from session manager
public_key_pem = session_manager.public_key_pem
# Parse the PEM to extract key components
public_key = serialization.load_pem_public_key(public_key_pem.encode())
# Convert RSA components to base64url
def int_to_base64url(value):
# Convert integer to bytes, then to base64url
byte_length = (value.bit_length() + 7) // 8
value_bytes = value.to_bytes(byte_length, byteorder='big')
return base64.urlsafe_b64encode(value_bytes).decode('ascii').rstrip('=')
# Get public key components
public_numbers = public_key.public_numbers()
jwk = {
"kty": "RSA",
"use": "sig",
"alg": "RS256",
"kid": "gendb-key-1",
"n": int_to_base64url(public_numbers.n),
"e": int_to_base64url(public_numbers.e)
}
jwks = {
"keys": [jwk]
}
return JSONResponse(jwks)
except Exception as e:
return JSONResponse(
{"error": f"Failed to generate JWKS: {str(e)}"},
status_code=500
)
async def token_introspection(request: Request, session_manager):
"""Token introspection endpoint (optional)"""
try:
data = await request.json()
token = data.get("token")
if not token:
return JSONResponse({"active": False})
# Verify the token
payload = session_manager.verify_token(token)
if payload:
return JSONResponse({
"active": True,
"sub": payload.get("sub"),
"aud": payload.get("aud"),
"iss": payload.get("iss"),
"exp": payload.get("exp"),
"iat": payload.get("iat"),
"email": payload.get("email"),
"name": payload.get("name"),
"preferred_username": payload.get("preferred_username")
})
else:
return JSONResponse({"active": False})
except Exception as e:
return JSONResponse(
{"error": f"Token introspection failed: {str(e)}"},
status_code=500
)
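
A relying party (OpenSearch's OIDC authenticator does the equivalent internally) can fetch this JWKS and verify tokens against it. A minimal sketch using PyJWT and httpx, both already in the project's dependency set; the URL and audience are assumptions taken from config.yml and SessionManager:

# Sketch: verify a gendb-issued token against the /auth/jwks endpoint.
import json
import httpx
import jwt
from jwt.algorithms import RSAAlgorithm

JWKS_URL = "http://gendb-backend:8000/auth/jwks"  # assumed deployment URL

def verify_against_jwks(token: str) -> dict:
    jwks = httpx.get(JWKS_URL).json()
    # Single key in the set, kid "gendb-key-1"; build an RSA public key from it.
    public_key = RSAAlgorithm.from_jwk(json.dumps(jwks["keys"][0]))
    return jwt.decode(token, public_key, algorithms=["RS256"],
                      audience="opensearch")  # one of the audiences SessionManager sets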

View file

@@ -9,5 +9,8 @@ async def search(request: Request, search_service, session_manager):
return JSONResponse({"error": "Query is required"}, status_code=400)
user = request.state.user
result = await search_service.search(query, user_id=user.user_id)
# Extract JWT token from cookie for OpenSearch OIDC auth
jwt_token = request.cookies.get("auth_token")
result = await search_service.search(query, user_id=user.user_id, jwt_token=jwt_token)
return JSONResponse(result)

View file

@@ -7,8 +7,9 @@ async def upload(request: Request, document_service, session_manager):
form = await request.form()
upload_file = form["file"]
user = request.state.user
jwt_token = request.cookies.get("auth_token")
result = await document_service.process_upload_file(upload_file, owner_user_id=user.user_id)
result = await document_service.process_upload_file(upload_file, owner_user_id=user.user_id, jwt_token=jwt_token)
return JSONResponse(result)
async def upload_path(request: Request, task_service, session_manager):
@@ -26,7 +27,8 @@ async def upload_path(request: Request, task_service, session_manager):
return JSONResponse({"error": "No files found in directory"}, status_code=400)
user = request.state.user
task_id = await task_service.create_upload_task(user.user_id, file_paths)
jwt_token = request.cookies.get("auth_token")
task_id = await task_service.create_upload_task(user.user_id, file_paths, jwt_token=jwt_token)
return JSONResponse({
"task_id": task_id,

View file

@@ -103,6 +103,21 @@ class AppClients:
self.converter = DocumentConverter()
return self
def create_user_opensearch_client(self, jwt_token: str):
"""Create OpenSearch client with user's JWT token for OIDC auth"""
headers = {'Authorization': f'Bearer {jwt_token}'}
return AsyncOpenSearch(
hosts=[{"host": OPENSEARCH_HOST, "port": OPENSEARCH_PORT}],
connection_class=AIOHttpConnection,
scheme="https",
use_ssl=True,
verify_certs=False,
ssl_assert_fingerprint=None,
headers=headers,
http_compress=True,
)
# Global clients instance
clients = AppClients()
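
Usage sketch: every request through a client from create_user_opensearch_client carries the user's Bearer token, so the openid_auth_domain authenticates it and DLS filters results server-side; the index name below is an assumption matching INDEX_NAME:

# Sketch: per-user search through the JWT-bearing client. No ACL clauses
# are needed here; OpenSearch applies the gendb_user_role DLS filter.
async def search_as_user(jwt_token: str, query_text: str):
    client = clients.create_user_opensearch_client(jwt_token)
    try:
        return await client.search(
            index="documents",  # assumed to match INDEX_NAME
            body={"query": {"match": {"text": query_text}}},
        )
    finally:
        await client.close()  # AsyncOpenSearch holds an aiohttp session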

View file

@@ -11,13 +11,13 @@ from .connection_manager import ConnectionManager
class ConnectorService:
"""Service to manage document connectors and process files"""
def __init__(self, opensearch_client, patched_async_client, process_pool, embed_model: str, index_name: str, task_service=None):
self.opensearch = opensearch_client
def __init__(self, patched_async_client, process_pool, embed_model: str, index_name: str, task_service=None, session_manager=None):
self.openai_client = patched_async_client
self.process_pool = process_pool
self.embed_model = embed_model
self.index_name = index_name
self.task_service = task_service
self.session_manager = session_manager
self.connection_manager = ConnectionManager()
async def initialize(self):
@@ -28,7 +28,7 @@ class ConnectorService:
"""Get a connector by connection ID"""
return await self.connection_manager.get_connector(connection_id)
async def process_connector_document(self, document: ConnectorDocument, owner_user_id: str) -> Dict[str, Any]:
async def process_connector_document(self, document: ConnectorDocument, owner_user_id: str, jwt_token: str = None) -> Dict[str, Any]:
"""Process a document from a connector using existing processing pipeline"""
# Create temporary file from document content
@@ -40,20 +40,21 @@
# Use existing process_file_common function with connector document metadata
# We'll use the document service's process_file_common method
from services.document_service import DocumentService
doc_service = DocumentService()
doc_service = DocumentService(session_manager=self.session_manager)
# Process using the existing pipeline but with connector document metadata
result = await doc_service.process_file_common(
file_path=tmp_file.name,
file_hash=document.id, # Use connector document ID as hash
owner_user_id=owner_user_id,
original_filename=document.filename # Pass the original Google Doc title
original_filename=document.filename, # Pass the original Google Doc title
jwt_token=jwt_token
)
# If successfully indexed, update the indexed documents with connector metadata
if result["status"] == "indexed":
# Update all chunks with connector-specific metadata
await self._update_connector_metadata(document, owner_user_id)
await self._update_connector_metadata(document, owner_user_id, jwt_token)
return {
**result,
@@ -65,7 +66,7 @@
# Clean up temporary file
os.unlink(tmp_file.name)
async def _update_connector_metadata(self, document: ConnectorDocument, owner_user_id: str):
async def _update_connector_metadata(self, document: ConnectorDocument, owner_user_id: str, jwt_token: str = None):
"""Update indexed chunks with connector-specific metadata"""
# Find all chunks for this document
query = {
@@ -74,7 +75,10 @@
}
}
response = await self.opensearch.search(index=self.index_name, body=query)
# Get user's OpenSearch client
opensearch_client = self.session_manager.get_user_opensearch_client(owner_user_id, jwt_token)
response = await opensearch_client.search(index=self.index_name, body=query)
# Update each chunk with connector metadata
for hit in response["hits"]["hits"]:
@@ -96,7 +100,7 @@
}
}
await self.opensearch.update(index=self.index_name, id=chunk_id, body=update_body)
await opensearch_client.update(index=self.index_name, id=chunk_id, body=update_body)
def _get_file_extension(self, mimetype: str) -> str:
"""Get file extension based on MIME type"""

View file

@@ -30,7 +30,7 @@ from session_manager import SessionManager
from auth_middleware import require_auth, optional_auth
# API endpoints
from api import upload, search, chat, auth, connectors, tasks
from api import upload, search, chat, auth, connectors, tasks, oidc
print("CUDA available:", torch.cuda.is_available())
print("CUDA version PyTorch was built with:", torch.version.cuda)
@@ -53,7 +53,7 @@ async def wait_for_opensearch():
raise Exception("OpenSearch failed to become ready")
async def init_index():
"""Initialize OpenSearch index"""
"""Initialize OpenSearch index and security roles"""
await wait_for_opensearch()
if not await clients.opensearch.indices.exists(index=INDEX_NAME):
@@ -62,6 +62,16 @@ async def init_index():
else:
print(f"Index '{INDEX_NAME}' already exists, skipping creation.")
async def init_index_when_ready():
"""Initialize OpenSearch index when it becomes available"""
try:
await init_index()
print("OpenSearch index initialization completed successfully")
except Exception as e:
print(f"OpenSearch index initialization failed: {e}")
print("OIDC endpoints will still work, but document operations may fail until OpenSearch is ready")
def initialize_services():
"""Initialize all services and their dependencies"""
# Initialize clients
@@ -71,8 +81,8 @@ def initialize_services():
session_manager = SessionManager(SESSION_SECRET)
# Initialize services
document_service = DocumentService()
search_service = SearchService()
document_service = DocumentService(session_manager=session_manager)
search_service = SearchService(session_manager)
task_service = TaskService(document_service, process_pool)
chat_service = ChatService()
@@ -81,12 +91,12 @@ def initialize_services():
# Initialize connector service
connector_service = ConnectorService(
opensearch_client=clients.opensearch,
patched_async_client=clients.patched_async_client,
process_pool=process_pool,
embed_model="text-embedding-3-small",
index_name=INDEX_NAME,
task_service=task_service
task_service=task_service,
session_manager=session_manager
)
# Initialize auth service
@@ -223,15 +233,32 @@ def create_app():
connector_service=services['connector_service'],
session_manager=services['session_manager']),
methods=["POST", "GET"]),
# OIDC endpoints
Route("/.well-known/openid-configuration",
partial(oidc.oidc_discovery,
session_manager=services['session_manager']),
methods=["GET"]),
Route("/auth/jwks",
partial(oidc.jwks_endpoint,
session_manager=services['session_manager']),
methods=["GET"]),
Route("/auth/introspect",
partial(oidc.token_introspection,
session_manager=services['session_manager']),
methods=["POST"]),
]
app = Starlette(debug=True, routes=routes)
app.state.services = services # Store services for cleanup
# Add startup event handler
@app.on_event("startup")
async def startup_event():
await init_index()
# Start index initialization in background to avoid blocking OIDC endpoints
asyncio.create_task(init_index_when_ready())
# Add shutdown event handler
@app.on_event("shutdown")

View file

@@ -22,13 +22,19 @@ class TaskProcessor(ABC):
class DocumentFileProcessor(TaskProcessor):
"""Default processor for regular file uploads"""
def __init__(self, document_service):
def __init__(self, document_service, owner_user_id: str = None, jwt_token: str = None):
self.document_service = document_service
self.owner_user_id = owner_user_id
self.jwt_token = jwt_token
async def process_item(self, upload_task: UploadTask, item: str, file_task: FileTask) -> None:
"""Process a regular file path using DocumentService"""
# This calls the existing logic
await self.document_service.process_single_file_task(upload_task, item)
# This calls the existing logic with user context
await self.document_service.process_single_file_task(
upload_task, item,
owner_user_id=self.owner_user_id,
jwt_token=self.jwt_token
)
class ConnectorFileProcessor(TaskProcessor):

View file

@@ -71,7 +71,7 @@ class AuthService:
}
async def handle_oauth_callback(self, connection_id: str, authorization_code: str,
state: str = None) -> dict:
state: str = None, request=None) -> dict:
"""Handle OAuth callback - exchange authorization code for tokens"""
if not all([connection_id, authorization_code]):
raise ValueError("Missing required parameters (connection_id, authorization_code)")
@@ -136,7 +136,7 @@
purpose = connection_config.config.get("purpose", "data_source")
if purpose == "app_auth":
return await self._handle_app_auth(connection_id, connection_config, token_data)
return await self._handle_app_auth(connection_id, connection_config, token_data, request)
else:
return await self._handle_data_source_auth(connection_id, connection_config)
@@ -145,9 +145,18 @@
self.used_auth_codes.discard(authorization_code)
raise e
async def _handle_app_auth(self, connection_id: str, connection_config, token_data: dict) -> dict:
async def _handle_app_auth(self, connection_id: str, connection_config, token_data: dict, request=None) -> dict:
"""Handle app authentication - create user session"""
jwt_token = await self.session_manager.create_user_session(token_data["access_token"])
# Extract issuer from redirect_uri in connection config
redirect_uri = connection_config.config.get("redirect_uri")
if not redirect_uri:
raise ValueError("redirect_uri not found in connection config")
# Get base URL from redirect_uri (remove path)
from urllib.parse import urlparse
parsed = urlparse(redirect_uri)
issuer = f"{parsed.scheme}://{parsed.netloc}"
jwt_token = await self.session_manager.create_user_session(token_data["access_token"], issuer)
if jwt_token:
# Get the user info to create a persistent Google Drive connection

View file

@@ -77,10 +77,11 @@ def chunk_texts_for_embeddings(texts: List[str], max_tokens: int = None, model:
return batches
class DocumentService:
def __init__(self, process_pool=None):
def __init__(self, process_pool=None, session_manager=None):
self.process_pool = process_pool
self.session_manager = session_manager
async def process_file_common(self, file_path: str, file_hash: str = None, owner_user_id: str = None, original_filename: str = None):
async def process_file_common(self, file_path: str, file_hash: str = None, owner_user_id: str = None, original_filename: str = None, jwt_token: str = None):
"""
Common processing logic for both upload and upload_path.
1. Optionally compute SHA256 hash if not provided.
@@ -98,7 +99,10 @@ class DocumentService:
sha256.update(chunk)
file_hash = sha256.hexdigest()
exists = await clients.opensearch.exists(index=INDEX_NAME, id=file_hash)
# Get user's OpenSearch client with JWT for OIDC auth
opensearch_client = self.session_manager.get_user_opensearch_client(owner_user_id, jwt_token)
exists = await opensearch_client.exists(index=INDEX_NAME, id=file_hash)
if exists:
return {"status": "unchanged", "id": file_hash}
@@ -130,10 +134,10 @@
"indexed_time": datetime.datetime.now().isoformat()
}
chunk_id = f"{file_hash}_{i}"
await clients.opensearch.index(index=INDEX_NAME, id=chunk_id, body=chunk_doc)
await opensearch_client.index(index=INDEX_NAME, id=chunk_id, body=chunk_doc)
return {"status": "indexed", "id": file_hash}
async def process_upload_file(self, upload_file, owner_user_id: str = None):
async def process_upload_file(self, upload_file, owner_user_id: str = None, jwt_token: str = None):
"""Process an uploaded file from form data"""
sha256 = hashlib.sha256()
tmp = tempfile.NamedTemporaryFile(delete=False)
@@ -147,11 +151,13 @@
tmp.flush()
file_hash = sha256.hexdigest()
exists = await clients.opensearch.exists(index=INDEX_NAME, id=file_hash)
# Get user's OpenSearch client with JWT for OIDC auth
opensearch_client = self.session_manager.get_user_opensearch_client(owner_user_id, jwt_token)
exists = await opensearch_client.exists(index=INDEX_NAME, id=file_hash)
if exists:
return {"status": "unchanged", "id": file_hash}
result = await self.process_file_common(tmp.name, file_hash, owner_user_id=owner_user_id)
result = await self.process_file_common(tmp.name, file_hash, owner_user_id=owner_user_id, jwt_token=jwt_token)
return result
finally:
@@ -194,7 +200,7 @@
"content_length": len(full_content)
}
async def process_single_file_task(self, upload_task, file_path: str):
async def process_single_file_task(self, upload_task, file_path: str, owner_user_id: str = None, jwt_token: str = None):
"""Process a single file and update task tracking - used by task service"""
from models.tasks import TaskStatus
import time
@@ -212,7 +218,8 @@
slim_doc = await loop.run_in_executor(self.process_pool, process_document_sync, file_path)
# Check if already indexed
exists = await clients.opensearch.exists(index=INDEX_NAME, id=slim_doc["id"])
opensearch_client = self.session_manager.get_user_opensearch_client(owner_user_id, jwt_token)
exists = await opensearch_client.exists(index=INDEX_NAME, id=slim_doc["id"])
if exists:
result = {"status": "unchanged", "id": slim_doc["id"]}
else:
@@ -235,10 +242,12 @@
"mimetype": slim_doc["mimetype"],
"page": chunk["page"],
"text": chunk["text"],
"chunk_embedding": vect
"chunk_embedding": vect,
"owner": owner_user_id,
"indexed_time": datetime.datetime.now().isoformat()
}
chunk_id = f"{slim_doc['id']}_{i}"
await clients.opensearch.index(index=INDEX_NAME, id=chunk_id, body=chunk_doc)
await opensearch_client.index(index=INDEX_NAME, id=chunk_id, body=chunk_doc)
result = {"status": "indexed", "id": slim_doc["id"]}

View file

@@ -3,9 +3,11 @@ from agentd.tool_decorator import tool
from config.settings import clients, INDEX_NAME, EMBED_MODEL
class SearchService:
def __init__(self, session_manager=None):
self.session_manager = session_manager
@tool
async def search_tool(self, query: str, user_id: str = None) -> Dict[str, Any]:
@tool # TODO: This will be broken until we figure out how to pass JWT through @tool decorator
async def search_tool(self, query: str, user_id: str = None, jwt_token: str = None) -> Dict[str, Any]:
"""
Use this tool to search for documents relevant to the query.
@@ -40,26 +42,13 @@
"size": 10
}
# Require authentication - no anonymous access to search
# Authentication required - DLS will handle document filtering automatically
if not user_id:
return {"results": [], "error": "Authentication required"}
# Authenticated user access control
# User can access documents if:
# 1. They own the document (owner field matches user_id)
# 2. They're in allowed_users list
# 3. Document has no ACL (public documents)
# TODO: Add group access control later
should_clauses = [
{"term": {"owner": user_id}},
{"term": {"allowed_users": user_id}},
{"bool": {"must_not": {"exists": {"field": "owner"}}}} # Public docs
]
search_body["query"]["bool"]["should"] = should_clauses
search_body["query"]["bool"]["minimum_should_match"] = 1
results = await clients.opensearch.search(index=INDEX_NAME, body=search_body)
# Get user's OpenSearch client with JWT for OIDC auth
opensearch_client = self.session_manager.get_user_opensearch_client(user_id, jwt_token)
results = await opensearch_client.search(index=INDEX_NAME, body=search_body)
# Transform results
chunks = []
@@ -75,6 +64,6 @@
})
return {"results": chunks}
async def search(self, query: str, user_id: str = None) -> Dict[str, Any]:
async def search(self, query: str, user_id: str = None, jwt_token: str = None) -> Dict[str, Any]:
"""Public search method for API endpoints"""
return await self.search_tool(query, user_id)
return await self.search_tool(query, user_id, jwt_token)

View file

@@ -24,11 +24,11 @@ class TaskService:
delay = min(base_delay * (2 ** retry_count) + random.uniform(0, 1), max_delay)
await asyncio.sleep(delay)
async def create_upload_task(self, user_id: str, file_paths: list) -> str:
async def create_upload_task(self, user_id: str, file_paths: list, jwt_token: str = None) -> str:
"""Create a new upload task for bulk file processing"""
# Use default DocumentFileProcessor
# Use default DocumentFileProcessor with user context
from models.processors import DocumentFileProcessor
processor = DocumentFileProcessor(self.document_service)
processor = DocumentFileProcessor(self.document_service, owner_user_id=user_id, jwt_token=jwt_token)
return await self.create_custom_task(user_id, file_paths, processor)
async def create_custom_task(self, user_id: str, items: list, processor) -> str:

View file

@@ -4,6 +4,8 @@ import httpx
from datetime import datetime, timedelta
from typing import Dict, Optional, Any
from dataclasses import dataclass, asdict
from cryptography.hazmat.primitives import serialization
import os
@dataclass
@@ -27,9 +29,36 @@
class SessionManager:
"""Manages user sessions and JWT tokens"""
def __init__(self, secret_key: str):
self.secret_key = secret_key
def __init__(self, secret_key: str = None, private_key_path: str = "keys/private_key.pem",
public_key_path: str = "keys/public_key.pem"):
self.secret_key = secret_key # Keep for backward compatibility
self.users: Dict[str, User] = {} # user_id -> User
self.user_opensearch_clients: Dict[str, Any] = {} # user_id -> OpenSearch client
# Load RSA keys
self.private_key_path = private_key_path
self.public_key_path = public_key_path
self._load_rsa_keys()
def _load_rsa_keys(self):
"""Load RSA private and public keys"""
try:
with open(self.private_key_path, 'rb') as f:
self.private_key = serialization.load_pem_private_key(
f.read(),
password=None
)
with open(self.public_key_path, 'rb') as f:
self.public_key = serialization.load_pem_public_key(f.read())
# Also get public key in PEM format for JWKS
self.public_key_pem = open(self.public_key_path, 'r').read()
except FileNotFoundError as e:
raise Exception(f"RSA key files not found: {e}")
except Exception as e:
raise Exception(f"Failed to load RSA keys: {e}")
async def get_user_info_from_token(self, access_token: str) -> Optional[Dict[str, Any]]:
"""Get user info from Google using access token"""
@@ -50,7 +79,7 @@ class SessionManager:
print(f"Error getting user info: {e}")
return None
async def create_user_session(self, access_token: str) -> Optional[str]:
async def create_user_session(self, access_token: str, issuer: str) -> Optional[str]:
"""Create user session from OAuth access token"""
user_info = await self.get_user_info_from_token(access_token)
if not user_info:
@@ -72,22 +101,40 @@
else:
self.users[user_id] = user
# Create JWT token
# Use provided issuer
# Create JWT token with OIDC-compliant claims
now = datetime.utcnow()
token_payload = {
"user_id": user_id,
# OIDC standard claims
"iss": issuer, # Issuer from request
"sub": user_id, # Subject (user ID)
"aud": ["opensearch", "gendb"], # Audience
"exp": now + timedelta(days=7), # Expiration
"iat": now, # Issued at
"auth_time": int(now.timestamp()), # Authentication time
# Custom claims
"user_id": user_id, # Keep for backward compatibility
"email": user.email,
"name": user.name,
"exp": datetime.utcnow() + timedelta(days=7), # 7 day expiry
"iat": datetime.utcnow()
"preferred_username": user.email,
"email_verified": True,
"roles": ["gendb_user"] # Backend role for OpenSearch
}
token = jwt.encode(token_payload, self.secret_key, algorithm="HS256")
token = jwt.encode(token_payload, self.private_key, algorithm="RS256")
return token
def verify_token(self, token: str) -> Optional[Dict[str, Any]]:
"""Verify JWT token and return user info"""
try:
payload = jwt.decode(token, self.secret_key, algorithms=["HS256"])
payload = jwt.decode(
token,
self.public_key,
algorithms=["RS256"],
audience=["opensearch", "gendb"]
)
return payload
except jwt.ExpiredSignatureError:
return None
@@ -103,4 +150,13 @@
payload = self.verify_token(token)
if payload:
return self.get_user(payload["user_id"])
return None
def get_user_opensearch_client(self, user_id: str, jwt_token: str):
"""Get or create OpenSearch client for user with their JWT"""
# Check if we have a cached client for this user
if user_id not in self.user_opensearch_clients:
from config.settings import clients
self.user_opensearch_clients[user_id] = clients.create_user_opensearch_client(jwt_token)
return self.user_opensearch_clients[user_id]
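
A minimal round-trip sketch of the RS256 flow above, assuming the key files generated by the Dockerfile are present; PyJWT accepts cryptography key objects directly, which is exactly how create_user_session and verify_token use them:

# Sketch: issue and verify a token the way create_user_session / verify_token do.
import datetime
import jwt
from cryptography.hazmat.primitives import serialization

with open("keys/private_key.pem", "rb") as f:
    private_key = serialization.load_pem_private_key(f.read(), password=None)
with open("keys/public_key.pem", "rb") as f:
    public_key = serialization.load_pem_public_key(f.read())

now = datetime.datetime.utcnow()
token = jwt.encode(
    {"iss": "http://localhost:8000",  # placeholder issuer
     "sub": "user-123", "aud": ["opensearch", "gendb"],
     "iat": now, "exp": now + datetime.timedelta(days=7),
     "roles": ["gendb_user"]},
    private_key, algorithm="RS256",
)
claims = jwt.decode(token, public_key, algorithms=["RS256"],
                    audience=["opensearch", "gendb"])
assert claims["sub"] == "user-123"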

uv.lock generated
View file

@@ -1,5 +1,5 @@
version = 1
revision = 2
revision = 3
requires-python = ">=3.13"
resolution-markers = [
"sys_platform == 'darwin'",
@@ -149,6 +149,28 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/66/f3/80a3f974c8b535d394ff960a11ac20368e06b736da395b551a49ce950cce/certifi-2025.7.9-py3-none-any.whl", hash = "sha256:d842783a14f8fdd646895ac26f719a061408834473cfc10203f6a575beb15d39", size = 159230, upload-time = "2025-07-09T02:13:57.007Z" },
]
[[package]]
name = "cffi"
version = "1.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pycparser" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" },
{ url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" },
{ url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" },
{ url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" },
{ url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" },
{ url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" },
{ url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" },
{ url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" },
{ url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" },
{ url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" },
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
]
[[package]]
name = "charset-normalizer"
version = "3.4.2"
@@ -192,6 +214,41 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]
[[package]]
name = "cryptography"
version = "45.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702, upload-time = "2025-08-05T23:58:23.464Z" },
{ url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" },
{ url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" },
{ url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" },
{ url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" },
{ url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" },
{ url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" },
{ url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" },
{ url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, upload-time = "2025-08-05T23:58:38.781Z" },
{ url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" },
{ url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923, upload-time = "2025-08-05T23:58:41.919Z" },
{ url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805, upload-time = "2025-08-05T23:58:43.792Z" },
{ url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111, upload-time = "2025-08-05T23:58:45.316Z" },
{ url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, upload-time = "2025-08-05T23:58:47.121Z" },
{ url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" },
{ url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" },
{ url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" },
{ url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" },
{ url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" },
{ url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = "2025-08-05T23:58:56.639Z" },
{ url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" },
{ url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" },
{ url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147, upload-time = "2025-08-05T23:59:01.716Z" },
{ url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459, upload-time = "2025-08-05T23:59:03.358Z" },
]
[[package]]
name = "dill"
version = "0.4.0"
@@ -433,6 +490,7 @@ source = { virtual = "." }
dependencies = [
{ name = "agentd" },
{ name = "aiofiles" },
{ name = "cryptography" },
{ name = "docling" },
{ name = "google-api-python-client" },
{ name = "google-auth-httplib2" },
@@ -450,6 +508,7 @@ requires-dist = [
requires-dist = [
{ name = "agentd", specifier = ">=0.2.1" },
{ name = "aiofiles", specifier = ">=24.1.0" },
{ name = "cryptography", specifier = ">=45.0.6" },
{ name = "docling", specifier = ">=2.41.0" },
{ name = "google-api-python-client", specifier = ">=2.143.0" },
{ name = "google-auth-httplib2", specifier = ">=0.2.0" },
@@ -1518,6 +1577,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d5/19/9ff4551b42f2068686c50c0d199072fa67aee57fc5cf86770cacf71efda3/pyclipper-1.3.0.post6-cp313-cp313-win_amd64.whl", hash = "sha256:e5ff68fa770ac654c7974fc78792978796f068bd274e95930c0691c31e192889", size = 109672, upload-time = "2024-10-18T12:22:30.411Z" },
]
[[package]]
name = "pycparser"
version = "2.22"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" },
]
[[package]]
name = "pydantic"
version = "2.11.7"