import { SolVec } from '@veclabs/solvec';

interface Memory {
  id: string;
  content: string;
  embedding: number[];
  timestamp: Date;
  sessionId: string;
  importance?: number; // optional weighting for ranking memories
  score?: number;      // similarity score, populated by recall()
}

class AgentMemory {
  // SolVec collection handle; typed loosely since this example only relies
  // on upsert(), query(), and verify()
  private collection: any;

  constructor(private sv: SolVec, private agentId: string) {}

  // Must be called once before remember()/recall()
  async init(dimensions: number) {
    this.collection = this.sv.collection(
      `agent-${this.agentId}`,
      { dimensions, metric: 'cosine' }
    );
  }

  // Store a new memory
  async remember(content: string, embedding: number[], sessionId: string) {
    const memory: Memory = {
      id: `mem_${Date.now()}_${Math.random().toString(36).slice(2)}`,
      content,
      embedding,
      timestamp: new Date(),
      sessionId,
    };
    await this.collection.upsert([{
      id: memory.id,
      values: memory.embedding,
      metadata: {
        content: memory.content,
        timestamp: memory.timestamp.toISOString(),
        sessionId: memory.sessionId,
      },
    }]);
    return memory.id;
  }
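
  // Optional variant, a sketch rather than part of the API shown above: it
  // persists the interface's `importance` field in metadata so callers can
  // re-rank recalled memories later. Only the upsert() shape already used in
  // remember() is assumed; the importance parameter is the sole addition.
  async rememberWeighted(
    content: string,
    embedding: number[],
    sessionId: string,
    importance: number,
  ) {
    const id = `mem_${Date.now()}_${Math.random().toString(36).slice(2)}`;
    await this.collection.upsert([{
      id,
      values: embedding,
      metadata: {
        content,
        timestamp: new Date().toISOString(),
        sessionId,
        importance,
      },
    }]);
    return id;
  }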

  // Recall relevant memories for a query
  async recall(queryEmbedding: number[], topK = 5): Promise<Memory[]> {
    const results = await this.collection.query({
      vector: queryEmbedding,
      topK,
      minScore: 0.7, // only return meaningfully relevant memories
    });
    return results.map((r: any) => ({
      id: r.id,
      content: r.metadata.content,
      embedding: [], // query results do not carry the stored vectors back
      timestamp: new Date(r.metadata.timestamp),
      sessionId: r.metadata.sessionId,
      score: r.score,
    }));
  }
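
  // Session-scoped recall: a sketch that assumes no extra SolVec API. It
  // over-fetches via recall() above, then filters client-side on the
  // sessionId stored in metadata by remember(). A server-side metadata
  // filter, if SolVec offers one, would be cheaper.
  async recallFromSession(
    queryEmbedding: number[],
    sessionId: string,
    topK = 5,
  ): Promise<Memory[]> {
    const memories = await this.recall(queryEmbedding, topK * 4); // over-fetch
    return memories.filter((m) => m.sessionId === sessionId).slice(0, topK);
  }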

  // Verify memory integrity on-chain
  async audit() {
    const proof = await this.collection.verify();
    console.log(`Memory verified on-chain: ${proof.verified}`);
    console.log(`Explorer: ${proof.solanaExplorerUrl}`);
    return proof;
  }
}

// Usage
async function chat(userMessage: string, sessionId: string) {
  const sv = new SolVec({ network: 'devnet' });
  const memory = new AgentMemory(sv, 'my-assistant');
  await memory.init(1536);

  // 1. Embed the user message
  const queryEmbedding = await embed(userMessage); // your embedding function

  // 2. Recall relevant memories
  const relevantMemories = await memory.recall(queryEmbedding);

  // 3. Build context from memories
  const context = relevantMemories.length > 0
    ? `Relevant context from memory:\n${relevantMemories.map(m => `- ${m.content}`).join('\n')}\n\n`
    : '';

  // 4. Generate response with context
  const response = await callLLM(`${context}User: ${userMessage}`); // your LLM call

  // 5. Store the interaction as a new memory
  const interactionEmbedding = await embed(`${userMessage} ${response}`);
  await memory.remember(
    `User asked: "${userMessage}". I responded: "${response}"`,
    interactionEmbedding,
    sessionId
  );

  return response;
}
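
// Example: spot-check memory integrity after a session. This only calls the
// audit() method defined above; failing hard on an unverified proof is one
// possible policy, not something the library prescribes.
async function verifyAgentMemory() {
  const sv = new SolVec({ network: 'devnet' });
  const memory = new AgentMemory(sv, 'my-assistant');
  await memory.init(1536);
  const proof = await memory.audit(); // logs status and explorer URL
  if (!proof.verified) {
    throw new Error('On-chain memory verification failed');
  }
}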

async function embed(text: string): Promise<number[]> {
  // Replace with your embedding provider:
  //   OpenAI: openai.embeddings.create(...)
  //   Cohere: cohere.embed(...)
  //   Local:  sentence-transformers
  return Array(1536).fill(0).map(() => Math.random()); // random placeholder
}
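
// A concrete embed() alternative, assuming the official `openai` npm package
// and an OPENAI_API_KEY environment variable. OpenAI's text-embedding-3-small
// model returns 1536-dimensional vectors, matching the init(1536) call above.
import OpenAI from 'openai';

const openai = new OpenAI();

async function embedWithOpenAI(text: string): Promise<number[]> {
  const res = await openai.embeddings.create({
    model: 'text-embedding-3-small',
    input: text,
  });
  return res.data[0].embedding;
}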