mode selection, settings added to ui
@@ -44,6 +44,7 @@ class ReasonRequest(BaseModel):
     session_id: str
     user_prompt: str
     temperature: float | None = None
+    backend: str | None = None


 # -------------------------------------------------------------------
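The new optional backend field rides along with the existing request fields. A minimal, self-contained sketch of the updated model and how a caller can opt into a specific backend; only the field names and defaults come from the hunk above, the example values (including "ollama") are illustrative:

from pydantic import BaseModel

class ReasonRequest(BaseModel):
    session_id: str
    user_prompt: str
    temperature: float | None = None
    backend: str | None = None  # added in this commit; None means "use the server-side fallback"

# Pinning the backend per request; omitting "backend" keeps the env-var default.
req = ReasonRequest(session_id="demo", user_prompt="hello", backend="ollama")
print(req.backend)                                                    # "ollama"
print(ReasonRequest(session_id="demo", user_prompt="hello").backend)  # None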
@@ -388,8 +389,11 @@ async def run_simple(req: ReasonRequest):
     logger.info(f"📨 Total messages being sent to LLM: {len(messages)} (including system message)")

-    # Get backend from env (default to OPENAI for standard mode)
-    backend = os.getenv("STANDARD_MODE_LLM", "OPENAI")
+    # Get backend from request, otherwise fall back to env variable
+    backend = req.backend if req.backend else os.getenv("STANDARD_MODE_LLM", "SECONDARY")
+    backend = backend.upper()  # Normalize to uppercase
     logger.info(f"🔧 Using backend: {backend}")

     temperature = req.temperature if req.temperature is not None else 0.7

     # Direct LLM call with messages (works for Ollama/OpenAI chat APIs)
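The selection order introduced above is: explicit request value first, then the STANDARD_MODE_LLM environment variable, then the literal default "SECONDARY", with the result upper-cased so lookups are case-insensitive. A sketch of that logic factored into a hypothetical helper for illustration; the handler in the diff does this inline and resolve_backend is not part of the commit:

import os

def resolve_backend(requested: str | None) -> str:
    # Request value wins; otherwise fall back to the env var, then to "SECONDARY".
    backend = requested if requested else os.getenv("STANDARD_MODE_LLM", "SECONDARY")
    return backend.upper()  # normalize so backend lookup is case-insensitive

print(resolve_backend("ollama"))  # OLLAMA
print(resolve_backend(None))      # value of STANDARD_MODE_LLM if set, else SECONDARY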