cortex 0.2: add VERBOSE_DEBUG logging (console + file handlers) to reasoning.py, refine.py, and reflection.py
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
# reasoning.py
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from llm.llm_router import call_llm
|
||||
|
||||
|
||||
@@ -9,6 +10,34 @@ from llm.llm_router import call_llm
|
||||
# ============================================================
# Module configuration — all values come from environment vars.
# ============================================================
CORTEX_LLM = os.getenv("CORTEX_LLM", "PRIMARY").upper()        # backend key for llm_router
GLOBAL_TEMP = float(os.getenv("LLM_TEMPERATURE", "0.7"))       # sampling temperature
VERBOSE_DEBUG = os.getenv("VERBOSE_DEBUG", "false").lower() == "true"

# Logger
logger = logging.getLogger(__name__)

# NOTE(review): indentation reconstructed from a whitespace-mangled diff.
# Handler setup is assumed to sit inside the VERBOSE_DEBUG guard (matches
# the "VERBOSE_DEBUG mode enabled" debug messages) — confirm against the
# original file.
if VERBOSE_DEBUG:
    logger.setLevel(logging.DEBUG)

    # Guard: attach handlers only once, so a re-import (or the module
    # being loaded under two names) does not emit duplicate log lines.
    if not logger.handlers:
        # Console handler
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(logging.Formatter(
            '%(asctime)s [REASONING] %(levelname)s: %(message)s',
            datefmt='%H:%M:%S'
        ))
        logger.addHandler(console_handler)

        # File handler — best-effort: if /app/logs is not writable we fall
        # back to console-only logging instead of crashing at import time.
        try:
            os.makedirs('/app/logs', exist_ok=True)
            file_handler = logging.FileHandler('/app/logs/cortex_verbose_debug.log', mode='a')
            file_handler.setFormatter(logging.Formatter(
                '%(asctime)s [REASONING] %(levelname)s: %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            ))
            logger.addHandler(file_handler)
            logger.debug("VERBOSE_DEBUG mode enabled for reasoning.py - logging to file")
        except Exception as e:
            logger.debug(f"VERBOSE_DEBUG mode enabled for reasoning.py - file logging failed: {e}")
|
||||
|
||||
|
||||
async def reason_check(
|
||||
@@ -147,10 +176,26 @@ async def reason_check(
|
||||
# --------------------------------------------------------
|
||||
# Call the LLM using the module-specific backend
|
||||
# --------------------------------------------------------
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"\n{'='*80}")
|
||||
logger.debug("[REASONING] Full prompt being sent to LLM:")
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(prompt)
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(f"Backend: {CORTEX_LLM}, Temperature: {GLOBAL_TEMP}")
|
||||
logger.debug(f"{'='*80}\n")
|
||||
|
||||
draft = await call_llm(
|
||||
prompt,
|
||||
backend=CORTEX_LLM,
|
||||
temperature=GLOBAL_TEMP,
|
||||
)
|
||||
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"\n{'='*80}")
|
||||
logger.debug("[REASONING] LLM Response received:")
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(draft)
|
||||
logger.debug(f"{'='*80}\n")
|
||||
|
||||
return draft
|
||||
|
||||
@@ -15,11 +15,36 @@ logger = logging.getLogger(__name__)
|
||||
# ------------------------------------------------------------
# Module configuration — all values come from environment vars.
# ------------------------------------------------------------
REFINER_TEMPERATURE = float(os.getenv("REFINER_TEMPERATURE", "0.3"))  # lower temp for refinement
REFINER_MAX_TOKENS = int(os.getenv("REFINER_MAX_TOKENS", "768"))
REFINER_DEBUG = os.getenv("REFINER_DEBUG", "false").lower() == "true"
VERBOSE_DEBUG = os.getenv("VERBOSE_DEBUG", "false").lower() == "true"

# These come from root .env
REFINE_LLM = os.getenv("REFINE_LLM", "").upper()
CORTEX_LLM = os.getenv("CORTEX_LLM", "PRIMARY").upper()

# NOTE(review): indentation reconstructed from a whitespace-mangled diff.
# Handler setup is assumed to sit inside the VERBOSE_DEBUG guard — confirm
# against the original file. `logger` is defined earlier in this module.
if VERBOSE_DEBUG:
    logger.setLevel(logging.DEBUG)

    # Guard: attach handlers only once, so a re-import does not emit
    # duplicate log lines.
    if not logger.handlers:
        # Console handler
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(logging.Formatter(
            '%(asctime)s [REFINE] %(levelname)s: %(message)s',
            datefmt='%H:%M:%S'
        ))
        logger.addHandler(console_handler)

        # File handler — best-effort: fall back to console-only logging
        # instead of crashing at import time if /app/logs is unwritable.
        try:
            os.makedirs('/app/logs', exist_ok=True)
            file_handler = logging.FileHandler('/app/logs/cortex_verbose_debug.log', mode='a')
            file_handler.setFormatter(logging.Formatter(
                '%(asctime)s [REFINE] %(levelname)s: %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            ))
            logger.addHandler(file_handler)
            logger.debug("VERBOSE_DEBUG mode enabled for refine.py - logging to file")
        except Exception as e:
            logger.debug(f"VERBOSE_DEBUG mode enabled for refine.py - file logging failed: {e}")
||||
|
||||
|
||||
# ===============================================
|
||||
# Prompt builder
|
||||
@@ -103,6 +128,15 @@ async def refine_answer(
|
||||
# backend priority: REFINE_LLM → CORTEX_LLM → PRIMARY
|
||||
backend = REFINE_LLM or CORTEX_LLM or "PRIMARY"
|
||||
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"\n{'='*80}")
|
||||
logger.debug("[REFINE] Full prompt being sent to LLM:")
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(prompt)
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(f"Backend: {backend}, Temperature: {REFINER_TEMPERATURE}")
|
||||
logger.debug(f"{'='*80}\n")
|
||||
|
||||
try:
|
||||
refined = await call_llm(
|
||||
prompt,
|
||||
@@ -110,6 +144,13 @@ async def refine_answer(
|
||||
temperature=REFINER_TEMPERATURE,
|
||||
)
|
||||
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"\n{'='*80}")
|
||||
logger.debug("[REFINE] LLM Response received:")
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(refined)
|
||||
logger.debug(f"{'='*80}\n")
|
||||
|
||||
return {
|
||||
"final_output": refined.strip() if refined else draft_output,
|
||||
"used_backend": backend,
|
||||
@@ -119,6 +160,9 @@ async def refine_answer(
|
||||
except Exception as e:
|
||||
logger.error(f"refine.py backend {backend} failed: {e}")
|
||||
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug("[REFINE] Falling back to draft output due to error")
|
||||
|
||||
return {
|
||||
"final_output": draft_output,
|
||||
"used_backend": backend,
|
||||
|
||||
@@ -2,8 +2,37 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
from llm.llm_router import call_llm
|
||||
|
||||
# Logger
VERBOSE_DEBUG = os.getenv("VERBOSE_DEBUG", "false").lower() == "true"
logger = logging.getLogger(__name__)

# NOTE(review): indentation reconstructed from a whitespace-mangled diff.
# Handler setup is assumed to sit inside the VERBOSE_DEBUG guard (matches
# the "VERBOSE_DEBUG mode enabled" debug messages) — confirm against the
# original file.
if VERBOSE_DEBUG:
    logger.setLevel(logging.DEBUG)

    # Guard: attach handlers only once, so a re-import does not emit
    # duplicate log lines.
    if not logger.handlers:
        # Console handler
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(logging.Formatter(
            '%(asctime)s [REFLECTION] %(levelname)s: %(message)s',
            datefmt='%H:%M:%S'
        ))
        logger.addHandler(console_handler)

        # File handler — best-effort: fall back to console-only logging
        # instead of crashing at import time if /app/logs is unwritable.
        try:
            os.makedirs('/app/logs', exist_ok=True)
            file_handler = logging.FileHandler('/app/logs/cortex_verbose_debug.log', mode='a')
            file_handler.setFormatter(logging.Formatter(
                '%(asctime)s [REFLECTION] %(levelname)s: %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            ))
            logger.addHandler(file_handler)
            logger.debug("VERBOSE_DEBUG mode enabled for reflection.py - logging to file")
        except Exception as e:
            logger.debug(f"VERBOSE_DEBUG mode enabled for reflection.py - file logging failed: {e}")
|
||||
|
||||
|
||||
async def reflect_notes(intake_summary: str, identity_block: dict | None) -> dict:
|
||||
"""
|
||||
@@ -46,8 +75,23 @@ async def reflect_notes(intake_summary: str, identity_block: dict | None) -> dic
|
||||
# -----------------------------
|
||||
# Call the selected LLM backend
|
||||
# -----------------------------
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"\n{'='*80}")
|
||||
logger.debug("[REFLECTION] Full prompt being sent to LLM:")
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(prompt)
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(f"Backend: {backend}")
|
||||
logger.debug(f"{'='*80}\n")
|
||||
|
||||
raw = await call_llm(prompt, backend=backend)
|
||||
print("[Reflection-Raw]:", raw)
|
||||
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"\n{'='*80}")
|
||||
logger.debug("[REFLECTION] LLM Response received:")
|
||||
logger.debug(f"{'='*80}")
|
||||
logger.debug(raw)
|
||||
logger.debug(f"{'='*80}\n")
|
||||
|
||||
# -----------------------------
|
||||
# Try direct JSON
|
||||
@@ -55,9 +99,12 @@ async def reflect_notes(intake_summary: str, identity_block: dict | None) -> dic
|
||||
try:
|
||||
parsed = json.loads(raw.strip())
|
||||
if isinstance(parsed, dict) and "notes" in parsed:
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug(f"[REFLECTION] Parsed {len(parsed['notes'])} notes from JSON")
|
||||
return parsed
|
||||
except:
|
||||
pass
|
||||
if VERBOSE_DEBUG:
|
||||
logger.debug("[REFLECTION] Direct JSON parsing failed, trying extraction...")
|
||||
|
||||
# -----------------------------
|
||||
# Try JSON extraction
|
||||
|
||||
Reference in New Issue
Block a user