import { SolVec } from "@veclabs/solvec";
/**
 * Answers a user question via retrieval-augmented generation (RAG):
 * embeds the query, retrieves the closest passages from the
 * "knowledge-base" vector collection, and asks the LLM to answer
 * using only those passages as context.
 *
 * @param userQuery - Free-text question from the user.
 * @returns `{ answer, sources }` where `sources` lists the retrieved
 *          matches (id, similarity score, origin, and a ≤200-char text
 *          preview). When no match clears the similarity threshold,
 *          returns a fixed "not found" answer with an empty source list.
 */
async function semanticSearch(userQuery: string) {
  const sv = new SolVec({ network: "devnet" });
  // NOTE(review): dimensions must match the model behind embed() —
  // 1536 suggests an OpenAI-style embedding model; confirm against embed().
  const collection = sv.collection("knowledge-base", { dimensions: 1536 });

  // 1. Embed the query with the same model used to index the collection.
  const queryEmbedding = await embed(userQuery);

  // 2. Retrieve up to 5 nearest neighbours above the similarity cutoff.
  const results = await collection.query({
    vector: queryEmbedding,
    topK: 5,
    minScore: 0.7, // drop weak matches so the LLM isn't fed noise
  });

  // 3. Short-circuit when nothing relevant cleared the threshold.
  if (results.length === 0) {
    return { answer: "No relevant information found.", sources: [] };
  }

  // 4. Concatenate retrieved passages into one context block for the LLM.
  //    (assumes every match carries metadata.text — TODO confirm indexing
  //    pipeline always writes it)
  const context = results.map((r) => r.metadata.text).join("\n\n");

  // 5. Generate the answer grounded in the retrieved context.
  const answer = await callLLM(
    `Answer using this context:\n\n${context}\n\nQuestion: ${userQuery}`,
  );

  return {
    answer,
    sources: results.map((r) => ({
      id: r.id,
      score: r.score,
      source: r.metadata.source,
      // Fix: append the ellipsis only when the text was actually truncated —
      // the original unconditionally added "..." even to texts ≤ 200 chars.
      text:
        r.metadata.text.length > 200
          ? r.metadata.text.slice(0, 200) + "..."
          : r.metadata.text,
    })),
  };
}