reorganizing and restructuring

serversdwn
2025-11-26 02:28:00 -05:00
parent 5492d9c0c5
commit cb00474ab3
8 changed files with 537 additions and 87 deletions


@@ -8,7 +8,7 @@ class IntakeClient:
"""Handles short-term / episodic summaries from Intake service."""
def __init__(self):
self.base_url = os.getenv("INTAKE_API", "http://intake:7080")
self.base_url = os.getenv("INTAKE_API", "http://intake:7083")
async def summarize_turn(self, session_id: str, user_msg: str, assistant_msg: Optional[str] = None) -> Dict[str, Any]:
payload = {

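The new cortex/router.py added later in this commit calls intake.get_context(), which is not shown in this hunk. A minimal sketch of what such a method could look like, assuming httpx and a GET /context/{session_id} endpoint on the Intake service (the library, path, and response shape are assumptions, not taken from this commit):

# Hypothetical sketch -- not part of this commit. Assumes httpx and a
# GET /context/{session_id} endpoint on the Intake service.
import os
from typing import Optional

import httpx


class IntakeContext:
    """Stand-in showing roughly what IntakeClient.get_context() could do."""

    def __init__(self) -> None:
        self.base_url = os.getenv("INTAKE_API", "http://intake:7083")

    async def get_context(self, session_id: Optional[str]) -> str:
        # No session yet -> nothing to summarize.
        if not session_id:
            return ""
        async with httpx.AsyncClient() as client:
            resp = await client.get(f"{self.base_url}/context/{session_id}")
            resp.raise_for_status()
            # Assumed response shape: {"summary": "..."}
            return resp.json().get("summary", "")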

@@ -1,86 +1,6 @@
from fastapi import FastAPI
from pydantic import BaseModel
from reasoning import reason_check
from reflection import reflect_notes
from rag import query_rag
from ingest_handler import handle_ingest
from refine import refine_answer
from router import router

# ---------------------------------------------------
# Create the app BEFORE using it
# ---------------------------------------------------
app = FastAPI()

# ---------------------------------------------------
# Models
# ---------------------------------------------------
class ReasonRequest(BaseModel):
    prompt: str
    session_id: str | None = None

class IngestRequest(BaseModel):
    user: str
    assistant: str | None = None
    session_id: str | None = None

# ---------------------------------------------------
# Load identity
# ---------------------------------------------------
IDENTITY = None

# ---------------------------------------------------
# Routes MUST come after app = FastAPI()
# ---------------------------------------------------
@app.get("/health")
def health():
    return {
        "status": "ok",
        "identity_loaded": IDENTITY is not None
    }

@app.post("/ingest")
async def ingest(data: IngestRequest):
    await handle_ingest(data)
    return {"status": "ok"}

@app.post("/reason")
async def reason(data: ReasonRequest):
    user_prompt = data.prompt
    intake_summary = "recent summary"
    identity_block = IDENTITY
    rag_block = query_rag(user_prompt)

    reflection_data = await reflect_notes(intake_summary, identity_block)
    notes = reflection_data.get("notes", [])

    draft = await reason_check(
        user_prompt,
        identity_block,
        rag_block,
        notes
    )

    # --- REFINE STEP ----------------------------------------------------
    refine_result = refine_answer(
        draft_output=draft,
        reflection_notes=notes,
        identity_block=identity_block,
        rag_block=rag_block,
    )
    final_output = refine_result["final_output"]

    return {
        "draft_output": draft,
        "reflection_notes": notes,
        "refined_output": final_output,
        "refine_meta": {
            "used_primary_backend": refine_result.get("used_primary_backend"),
            "fallback_used": refine_result.get("fallback_used")
        },
        "identity_used": identity_block is not None,
        "rag_used": rag_block is not None
    }

app.include_router(router)
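The hunk header (-1,86 +1,6) shows main.py shrinking to roughly six lines; the surviving version is not reproduced in this view. Given that the pipeline logic moves into cortex/router.py below, the slimmed file plausibly reduces to something like this sketch (an assumption, not the committed content):

# Sketch only -- the exact surviving lines of main.py are not shown in this diff.
from fastapi import FastAPI

from router import router

app = FastAPI()
app.include_router(router)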


@@ -0,0 +1,7 @@
def apply_persona(text: str) -> str:
"""
Persona layer.
Right now it passes text unchanged.
Later we will add Lyra-voice transformation here.
"""
return text or ""
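Since apply_persona is currently a pass-through, the pipeline's final output equals the refined text, for example:

apply_persona("hello there")   # -> "hello there"
apply_persona("")              # -> ""  (falsy input collapses to an empty string)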


@@ -1,5 +1,5 @@
 # reasoning.py
-from llm_router import call_llm
+from llm.llm_router import call_llm

 async def reason_check(user_prompt: str,
                        identity_block: dict | None,


@@ -1,5 +1,5 @@
 # reflection.py
-from llm_router import call_llm
+from llm.llm_router import call_llm

 import json
@@ -30,7 +30,7 @@ async def reflect_notes(intake_summary: str, identity_block: dict | None) -> dic
     )

-    raw = await call_llm(prompt, backend="cloud")
+    raw = await call_llm(prompt, backend="primary")
     print("[Reflection-Raw]:", raw)

cortex/router.py (new file)

@@ -0,0 +1,63 @@
from fastapi import APIRouter
from pydantic import BaseModel
from typing import Optional, List, Any

from reasoning.reasoning import reason_check
from reasoning.reflection import reflect_notes
from reasoning.refine import refine_answer
from persona.speak import apply_persona
from ingest.intake_client import IntakeClient

router = APIRouter()

# ------------------------------------------------------
# Request schema
# ------------------------------------------------------
class ReasonRequest(BaseModel):
    session_id: Optional[str]
    user_prompt: str
    temperature: float = 0.7

# ------------------------------------------------------
# /reason endpoint
# ------------------------------------------------------
@router.post("/reason")
async def run_reason(req: ReasonRequest):
    # 1. Summaries from Intake (context memory)
    intake = IntakeClient()
    intake_summary = await intake.get_context(req.session_id)

    # 2. Internal reflection notes
    reflection = await reflect_notes(intake_summary, identity_block=None)
    reflection_notes: List[str] = reflection.get("notes", [])

    # 3. Draft answer (weak, unfiltered)
    draft = await reason_check(
        user_prompt=req.user_prompt,
        identity_block=None,
        rag_block=None,
        reflection_notes=reflection_notes,
    )

    # 4. Refine the answer (structured self-correction)
    refined_packet: dict[str, Any] = refine_answer(
        draft_output=draft,
        reflection_notes=reflection_notes,
        identity_block=None,
        rag_block=None,
    )
    refined_text = refined_packet.get("final_output", draft)

    # 5. Persona styling (Lyra voice)
    final_output = apply_persona(refined_text)

    return {
        "draft": draft,
        "refined": refined_text,
        "final": final_output,
        "reflection_notes": reflection_notes,
        "session_id": req.session_id,
    }
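Once the cortex service is running, the new route can be smoke-tested with a small client. A sketch assuming the app is served on localhost:8000 (host, port, and payload values are assumptions, not part of this commit):

# Hypothetical smoke test for POST /reason -- endpoint address and payload
# values are assumed, not taken from this commit.
import httpx

resp = httpx.post(
    "http://localhost:8000/reason",
    json={"session_id": "demo-session", "user_prompt": "Summarize what you remember about me."},
    timeout=120.0,
)
resp.raise_for_status()
body = resp.json()
print(body["draft"])    # unrefined first pass
print(body["final"])    # refined, persona-styled answer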