Initial clean commit - unified Lyra stack

This commit is contained in:
serversdwn
2025-11-16 03:17:32 -05:00
commit 94fb091e59
270 changed files with 74200 additions and 0 deletions

37
rag/rag_query.py Normal file
View File

@@ -0,0 +1,37 @@
# rag_query.py
#
# Retrieval-augmented query against the Lyra chat archive:
# 1. embed the user's question,
# 2. retrieve the most similar stored excerpts from ChromaDB,
# 3. synthesize a concise answer with a chat model grounded only in
#    those excerpts.
import os, sys, chromadb
from openai import OpenAI
from dotenv import load_dotenv

load_dotenv()

# Hoisted configuration so model/tuning changes happen in one place.
EMBED_MODEL = "text-embedding-3-small"
CHAT_MODEL = "gpt-4o-mini"
TOP_K = 5


def main() -> None:
    """Run one archive query taken from argv (or an interactive prompt)."""
    query = " ".join(sys.argv[1:]) or input("Ask Lyra-Archive: ")

    # Fail fast with a clear message instead of an opaque auth error
    # from the first API call.
    api_key = os.getenv("OPENAI_API_KEY")
    if not api_key:
        sys.exit("Error: OPENAI_API_KEY is not set (check your .env file).")

    client = OpenAI(api_key=api_key)
    chroma = chromadb.PersistentClient(path="./chromadb")
    collection = chroma.get_or_create_collection("lyra_chats")

    # embed the question
    q_emb = client.embeddings.create(
        model=EMBED_MODEL,
        input=query
    ).data[0].embedding

    # search the collection for the nearest stored chunks
    results = collection.query(query_embeddings=[q_emb], n_results=TOP_K)
    docs = results["documents"][0]
    metas = results["metadatas"][0]
    if not docs:
        # Nothing retrieved: avoid an empty report and a pointless
        # empty-context LLM call.
        sys.exit("No matching excerpts found in the archive.")

    print("\n🔍 Top related excerpts:\n")
    for doc, meta in zip(docs, metas):
        # .get() keeps the report readable even if a record is missing
        # one of the expected metadata fields.
        print(f"📄 {meta.get('source', '?')} ({meta.get('role', '?')}) — {meta.get('title', '?')}")
        print(doc[:300].strip(), "\n---")

    # synthesize an answer grounded only in the retrieved context
    context = "\n\n".join(docs)
    answer = client.chat.completions.create(
        model=CHAT_MODEL,
        messages=[
            {"role": "system", "content": "Answer based only on the context below. Be concise and practical."},
            {"role": "user", "content": f"Context:\n{context}\n\nQuestion: {query}"},
        ],
    ).choices[0].message.content
    print("\n💡 Lyra-Archive Answer:\n", answer)


if __name__ == "__main__":
    main()