Changed model endpoints to fall back to stored settings when parameters are not provided

This commit is contained in:
Lucas Oliveira 2025-09-24 09:50:54 -03:00
parent 9a14192fd3
commit fad175efe3

View file

@ -17,14 +17,18 @@ async def get_openai_models(request, models_service, session_manager):
try: try:
config = get_openrag_config() config = get_openrag_config()
api_key = config.provider.api_key api_key = config.provider.api_key
logger.info(f"Retrieved API key from config: {'yes' if api_key else 'no'}") logger.info(
f"Retrieved API key from config: {'yes' if api_key else 'no'}"
)
except Exception as e: except Exception as e:
logger.error(f"Failed to get config: {e}") logger.error(f"Failed to get config: {e}")
if not api_key: if not api_key:
return JSONResponse( return JSONResponse(
{"error": "OpenAI API key is required either as query parameter or in configuration"}, {
status_code=400 "error": "OpenAI API key is required either as query parameter or in configuration"
},
status_code=400,
) )
models = await models_service.get_openai_models(api_key=api_key) models = await models_service.get_openai_models(api_key=api_key)
@ -32,8 +36,7 @@ async def get_openai_models(request, models_service, session_manager):
except Exception as e: except Exception as e:
logger.error(f"Failed to get OpenAI models: {str(e)}") logger.error(f"Failed to get OpenAI models: {str(e)}")
return JSONResponse( return JSONResponse(
{"error": f"Failed to retrieve OpenAI models: {str(e)}"}, {"error": f"Failed to retrieve OpenAI models: {str(e)}"}, status_code=500
status_code=500
) )
@ -44,13 +47,31 @@ async def get_ollama_models(request, models_service, session_manager):
query_params = dict(request.query_params) query_params = dict(request.query_params)
endpoint = query_params.get("endpoint") endpoint = query_params.get("endpoint")
# If no endpoint provided, try to get it from stored configuration
if not endpoint:
try:
config = get_openrag_config()
endpoint = config.provider.endpoint
logger.info(
f"Retrieved endpoint from config: {'yes' if endpoint else 'no'}"
)
except Exception as e:
logger.error(f"Failed to get config: {e}")
if not endpoint:
return JSONResponse(
{
"error": "Endpoint is required either as query parameter or in configuration"
},
status_code=400,
)
models = await models_service.get_ollama_models(endpoint=endpoint) models = await models_service.get_ollama_models(endpoint=endpoint)
return JSONResponse(models) return JSONResponse(models)
except Exception as e: except Exception as e:
logger.error(f"Failed to get Ollama models: {str(e)}") logger.error(f"Failed to get Ollama models: {str(e)}")
return JSONResponse( return JSONResponse(
{"error": f"Failed to retrieve Ollama models: {str(e)}"}, {"error": f"Failed to retrieve Ollama models: {str(e)}"}, status_code=500
status_code=500
) )
@ -63,15 +84,65 @@ async def get_ibm_models(request, models_service, session_manager):
api_key = query_params.get("api_key") api_key = query_params.get("api_key")
project_id = query_params.get("project_id") project_id = query_params.get("project_id")
config = get_openrag_config()
# If no API key provided, try to get it from stored configuration
if not api_key:
try:
api_key = config.provider.api_key
logger.info(
f"Retrieved API key from config: {'yes' if api_key else 'no'}"
)
except Exception as e:
logger.error(f"Failed to get config: {e}")
if not api_key:
return JSONResponse(
{
"error": "OpenAI API key is required either as query parameter or in configuration"
},
status_code=400,
)
if not endpoint:
try:
endpoint = config.provider.endpoint
logger.info(
f"Retrieved endpoint from config: {'yes' if endpoint else 'no'}"
)
except Exception as e:
logger.error(f"Failed to get config: {e}")
if not endpoint:
return JSONResponse(
{
"error": "Endpoint is required either as query parameter or in configuration"
},
status_code=400,
)
if not project_id:
try:
project_id = config.provider.project_id
logger.info(
f"Retrieved project ID from config: {'yes' if project_id else 'no'}"
)
except Exception as e:
logger.error(f"Failed to get config: {e}")
if not project_id:
return JSONResponse(
{
"error": "Project ID is required either as query parameter or in configuration"
},
status_code=400,
)
models = await models_service.get_ibm_models( models = await models_service.get_ibm_models(
endpoint=endpoint, endpoint=endpoint, api_key=api_key, project_id=project_id
api_key=api_key,
project_id=project_id
) )
return JSONResponse(models) return JSONResponse(models)
except Exception as e: except Exception as e:
logger.error(f"Failed to get IBM models: {str(e)}") logger.error(f"Failed to get IBM models: {str(e)}")
return JSONResponse( return JSONResponse(
{"error": f"Failed to retrieve IBM models: {str(e)}"}, {"error": f"Failed to retrieve IBM models: {str(e)}"}, status_code=500
status_code=500 )
)