intake internalized by cortex, removed intake route in relay

serversdwn
2025-11-29 19:08:15 -05:00
parent cc014d0a73
commit 320bf4439b
4 changed files with 313 additions and 89 deletions
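After this change the relay only talks to Cortex: the blocking /reason call produces the reply, the /ingest call is fired off without awaiting it, and the separate Intake POST is gone. Roughly, the handler in the diff below reduces to the following sketch (condensed from the diff; the /reason payload is abridged, and the OpenAI-format wrapping and error handling are omitted):

// Sketch of the simplified relay flow after this commit (not the full handler)
async function handleChatRequest(session_id, user_msg) {
  // 1. Blocking call: Cortex reasons over the message (intake is now handled inside Cortex)
  const reason = await postJSON(CORTEX_REASON, { session_id, user_msg });
  const persona = reason.final_output || reason.persona || "(no persona text)";

  // 2. Non-blocking ingest; failures are only logged
  postJSON(CORTEX_INGEST, { session_id, user_msg, assistant_msg: persona })
    .catch(e => console.warn("Relay → Cortex.ingest failed:", e.message));

  // 3. Reply to the caller
  return { session_id, reply: persona };
}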

@@ -13,7 +13,6 @@ const PORT = Number(process.env.PORT || 7078);
 // core endpoints
 const CORTEX_REASON = process.env.CORTEX_REASON_URL || "http://cortex:7081/reason";
 const CORTEX_INGEST = process.env.CORTEX_INGEST_URL || "http://cortex:7081/ingest";
-const INTAKE_URL = process.env.INTAKE_URL || "http://intake:7080/add_exchange";
 
 // -----------------------------------------------------
 // Helper request wrapper
@@ -28,6 +27,7 @@ async function postJSON(url, data) {
   const raw = await resp.text();
   let json;
 
+  // Try to parse JSON safely
   try {
     json = raw ? JSON.parse(raw) : null;
   } catch (e) {
@@ -45,7 +45,7 @@ async function postJSON(url, data) {
 // Shared chat handler logic
 // -----------------------------------------------------
 async function handleChatRequest(session_id, user_msg) {
-  // 1. → Cortex.reason
+  // 1. → Cortex.reason: the main pipeline
   let reason;
   try {
     reason = await postJSON(CORTEX_REASON, {
@@ -60,20 +60,16 @@ async function handleChatRequest(session_id, user_msg) {
   const persona = reason.final_output || reason.persona || "(no persona text)";
 
   // 2. → Cortex.ingest (async, non-blocking)
+  // Cortex might still want this for separate ingestion pipeline.
   postJSON(CORTEX_INGEST, {
     session_id,
     user_msg,
     assistant_msg: persona
-  }).catch(e => console.warn("Relay → Cortex.ingest failed:", e.message));
+  }).catch(e =>
+    console.warn("Relay → Cortex.ingest failed:", e.message)
+  );
 
-  // 3. → Intake summary (async, non-blocking)
-  postJSON(INTAKE_URL, {
-    session_id,
-    user_msg,
-    assistant_msg: persona
-  }).catch(e => console.warn("Relay → Intake failed:", e.message));
-
-  // 4. Return result
+  // 3. Return corrected result
   return {
     session_id,
     reply: persona
@@ -88,11 +84,10 @@ app.get("/_health", (_, res) => {
 });
 
 // -----------------------------------------------------
-// OPENAI-COMPATIBLE ENDPOINT (for UI)
+// OPENAI-COMPATIBLE ENDPOINT (for UI & clients)
 // -----------------------------------------------------
 app.post("/v1/chat/completions", async (req, res) => {
   try {
-    // Extract from OpenAI format
     const session_id = req.body.session_id || req.body.user || "default";
     const messages = req.body.messages || [];
     const lastMessage = messages[messages.length - 1];
@@ -104,10 +99,8 @@ app.post("/v1/chat/completions", async (req, res) => {
     console.log(`Relay (v1) → received: "${user_msg}"`);
 
     // Call the same logic as /chat
     const result = await handleChatRequest(session_id, user_msg);
 
-    // Return in OpenAI format
     return res.json({
       id: `chatcmpl-${Date.now()}`,
       object: "chat.completion",
@@ -129,7 +122,7 @@ app.post("/v1/chat/completions", async (req, res) => {
     });
   } catch (err) {
-    console.error("Relay v1 endpoint fatal:", err);
+    console.error("Relay v1 fatal:", err);
     res.status(500).json({
       error: {
         message: err.message || String(err),
@@ -141,7 +134,7 @@ app.post("/v1/chat/completions", async (req, res) => {
 });
 
 // -----------------------------------------------------
-// MAIN ENDPOINT (new canonical)
+// MAIN ENDPOINT (canonical Lyra UI entrance)
 // -----------------------------------------------------
 app.post("/chat", async (req, res) => {
   try {