Fix relay request payload key and reflection backend override so the relay works again (pre-LLM redo)

serversdwn
2025-11-26 14:20:47 -05:00
parent 0a091fc42c
commit a087de9790
4 changed files with 13 additions and 5 deletions


@@ -214,7 +214,7 @@ app.post("/v1/chat/completions", async (req, res) => {
     method: "POST",
     headers: { "Content-Type": "application/json" },
     body: JSON.stringify({
-      prompt: lastUserMsg,
+      user_prompt: lastUserMsg,
       session_id: sessionId,
       user_id: userId
     })
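The relay now sends user_prompt instead of prompt, so a strictly validated downstream intake service would have been rejecting the old key, which fits the "works again" in the commit message. A minimal sketch of such an endpoint, assuming FastAPI/pydantic; the route path and model name are hypothetical, only the three field names come from the diff:

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class IntakeRequest(BaseModel):
    user_prompt: str  # formerly "prompt"; sending the old key now fails validation (422)
    session_id: str
    user_id: str

@app.post("/intake")  # hypothetical route; the real path is not shown in the diff
async def intake(req: IntakeRequest) -> dict:
    # Acknowledge receipt; the real service would run the memory/LLM pipeline.
    return {"ok": True, "session_id": req.session_id}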


@@ -0,0 +1,4 @@
{"ts":"2025-11-26T18:14:01.756Z","turn":[{"role":"user","content":"Are you alive?"},{"role":"assistant","content":"As an AI, I don't exist in the physical world and therefore cannot be considered \"alive\" in the traditional sense. However, I am operational and can engage in conversations like this one! How may I assist you today?"}]}
{"ts":"2025-11-26T18:27:35.545Z","turn":[{"role":"user","content":"Are you awake?"},{"role":"assistant","content":"Yes, I'm awake and ready to converse with you! How can I assist you today?"}]}
{"ts":"2025-11-26T18:33:31.708Z","turn":[{"role":"user","content":"Are you awake?"},{"role":"assistant","content":"Yes, I am here and ready to help or chat with you! What would you like to know or discuss?"}]}
{"ts":"2025-11-26T18:49:39.424Z","turn":[{"role":"user","content":"Are you awake?"},{"role":"assistant","content":"Of course, I'm awake and ready to help you or have a conversation about anything you'd like! What can I assist you with today?"}]}


@@ -22,15 +22,18 @@ async def reflect_notes(intake_summary: str, identity_block: dict | None) -> dic
"Rules for output:\n"
"1. Return ONLY valid JSON.\n"
"2. JSON must have exactly one key: \"notes\".\n"
"3. \"notes\" must be a list of 36 short strings.\n"
"3. \"notes\" must be a list of 3 to 6 short strings.\n"
"4. Notes must be actionable (e.g., \"keep it concise\", \"maintain context\").\n"
"5. No markdown, no apologies, no explanations.\n\n"
"Return JSON:\n"
"{ \"notes\": [\"...\"] }\n"
)
import os
backend = os.getenv("LLM_FORCE_BACKEND", "primary")
raw = await call_llm(prompt, backend="primary")
raw = await call_llm(prompt, backend=backend)
print("[Reflection-Raw]:", raw)


@@ -9,6 +9,7 @@ volumes:
     driver: local
 services:
   # ============================================================
   # NeoMem: Postgres
   # ============================================================
@@ -80,7 +81,7 @@ services:
       - lyra_net
   # ============================================================
-  # Relay
+  # Relay (host mode)
   # ============================================================
   relay:
     build:
@@ -148,4 +149,4 @@ services:
 #    ports:
 #      - "7090:7090"
 #    networks:
-#      - lyra_net
+#      - lyra_net
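"Host mode" presumably means the relay joins the host's network stack instead of the lyra_net bridge, which would also explain the commented-out ports:/networks: entries above: Compose rejects combining network_mode with networks:, and published ports are ignored under host networking. A hypothetical sketch of what the relay service would then look like; the actual definition is cut off in the diff:

relay:
  build:
    context: .        # assumed build context; the diff ends after "build:"
  network_mode: host  # host networking; ports:/networks: must stay commented out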