Loading...
Loading...
Persistent memory layer for AI agents using Postgres/pgvector with MCP server support
npx skill4agent add aradotso/trending-skills stash-ai-memory
Skill by ara.so — Daily 2026 Skills collection.
Agent ──► MCP Server ──► Postgres + pgvector
│
└──► Background Consolidation Pipeline
(Episodes → Facts → Relationships →
Causal Links → Goals → Failures →
Hypotheses → Confidence Decay)
git clone https://github.com/alash3al/stash.git
cd stash
cp .env.example .env
# Edit .env with your LLM API key and model
docker compose up
# .env
# LLM provider (OpenAI-compatible endpoint)
LLM_BASE_URL=https://api.openai.com/v1
LLM_API_KEY=$OPENAI_API_KEY
LLM_MODEL=gpt-4o-mini
# Or use Ollama (local)
# LLM_BASE_URL=http://localhost:11434/v1
# LLM_API_KEY=ollama
# LLM_MODEL=llama3.2
# Or OpenRouter
# LLM_BASE_URL=https://openrouter.ai/api/v1
# LLM_API_KEY=$OPENROUTER_API_KEY
# LLM_MODEL=anthropic/claude-3-haiku
# Postgres connection
DATABASE_URL=postgres://stash:stash@localhost:5432/stash?sslmode=disable
# MCP server
MCP_SERVER_ADDR=:8080
# Consolidation pipeline interval
CONSOLIDATION_INTERVAL=5m
git clone https://github.com/alash3al/stash.git
cd stash
# Build the binary
go build -o stash ./cmd/stash
# Run migrations and start server
./stash serve
~/Library/Application Support/Claude/claude_desktop_config.json
{
"mcpServers": {
"stash": {
"url": "http://localhost:8080/mcp",
"transport": "http"
}
}
}
.cursor/mcp.json
{
"mcpServers": {
"stash": {
"url": "http://localhost:8080/mcp",
"transport": "http"
}
}
}
~/.continue/config.json
{
"experimental": {
"modelContextProtocolServers": [
{
"transport": {
"type": "http",
"url": "http://localhost:8080/mcp"
}
}
]
}
}
| Tool | Purpose |
|---|---|
| `remember` | Store an episode or observation |
| `recall` | Semantic search across memory |
| `facts` | Query consolidated facts |
| `context` | Get/set working context |
| `forget` | Remove specific memories |
package main
import (
"context"
"fmt"
"log"
"github.com/alash3al/stash/pkg/client"
)
func main() {
c, err := client.New(client.Config{
BaseURL: "http://localhost:8080",
})
if err != nil {
log.Fatal(err)
}
ctx := context.Background()
// Store an episode
err = c.Remember(ctx, client.Episode{
AgentID: "my-agent",
Content: "User prefers dark mode and uses vim keybindings",
Tags: []string{"preferences", "ui"},
})
if err != nil {
log.Fatal(err)
}
// Recall relevant memories
results, err := c.Recall(ctx, client.RecallQuery{
AgentID: "my-agent",
Query: "what are the user's editor preferences?",
Limit: 5,
})
if err != nil {
log.Fatal(err)
}
for _, r := range results {
fmt.Printf("[%.2f] %s\n", r.Score, r.Content)
}
}# docker-compose.yml (from repo)
services:
  # Postgres with the pgvector extension preinstalled (vector similarity search).
  postgres:
    image: pgvector/pgvector:pg16
    environment:
      POSTGRES_USER: stash
      POSTGRES_PASSWORD: stash
      POSTGRES_DB: stash
    volumes:
      # Persist database files across container restarts.
      - pgdata:/var/lib/postgresql/data
    healthcheck:
      # Report healthy only once Postgres accepts connections, so the
      # stash service does not start against a half-initialized database.
      test: ["CMD-SHELL", "pg_isready -U stash"]
      interval: 5s
      timeout: 5s
      retries: 5
  stash:
    build: .
    env_file: .env
    environment:
      # Overrides the localhost URL from .env — inside the compose network
      # the database is reachable by its service name "postgres".
      DATABASE_URL: postgres://stash:stash@postgres:5432/stash?sslmode=disable
    ports:
      - "8080:8080"
    depends_on:
      postgres:
        condition: service_healthy
volumes:
pgdata:
curl -X POST http://localhost:8080/consolidate
# Set context
curl -X PUT http://localhost:8080/api/context/my-agent \
-H "Content-Type: application/json" \
-d '{"key": "current_task", "value": "debugging auth middleware"}'
# Get context
curl http://localhost:8080/api/context/my-agent
import requests
STASH_URL = "http://localhost:8080"
def remember(agent_id: str, content: str, tags: list[str] | None = None):
    """Store a single episode for ``agent_id`` in stash.

    Args:
        agent_id: Identifier of the agent the memory belongs to.
        content: Free-text observation to persist.
        tags: Optional labels attached to the episode.

    Raises:
        requests.HTTPError: If the server rejects the episode.
    """
    r = requests.post(
        f"{STASH_URL}/api/episodes",
        json={
            "agent_id": agent_id,
            "content": content,
            "tags": tags or [],
        },
        timeout=10,  # fail fast instead of hanging if the server is down
    )
    # Surface HTTP errors instead of silently dropping the memory.
    r.raise_for_status()
def recall(agent_id: str, query: str, limit: int = 5) -> list[dict]:
    """Semantic search over ``agent_id``'s stored memories.

    Args:
        agent_id: Identifier of the agent whose memory is queried.
        query: Natural-language question matched against stored episodes.
        limit: Maximum number of results to return.

    Returns:
        Result dicts (each with at least ``score`` and ``content``);
        an empty list when nothing matches.

    Raises:
        requests.HTTPError: If the server returns an error status.
    """
    r = requests.post(
        f"{STASH_URL}/api/recall",
        json={
            "agent_id": agent_id,
            "query": query,
            "limit": limit,
        },
        timeout=10,  # fail fast instead of hanging if the server is down
    )
    # Don't try to parse an error body as if it were results.
    r.raise_for_status()
    return r.json().get("results", [])
# Usage
remember("assistant-1", "User is building a Go microservice with gRPC")
memories = recall("assistant-1", "what is the user working on?")
for m in memories:
    print(f"[{m['score']:.2f}] {m['content']}")

def build_system_prompt(agent_id: str, base_prompt: str, user_message: str) -> str:
    """Prepend relevant long-term memories to a base system prompt.

    Recalls up to 10 memories matching ``user_message``; when nothing is
    recalled, the base prompt is returned unchanged.
    """
    recalled = recall(agent_id, user_message, limit=10)
    if not recalled:
        return base_prompt
    memory_block = "\n".join([f"- {m['content']}" for m in recalled])
    return f"""{base_prompt}
## Relevant Memory
{memory_block}
"""
from agents import Agent, Runner
from agents.mcp import MCPServerHTTP

# Expose stash's MCP tools to the agent over HTTP.
memory_server = MCPServerHTTP(url="http://localhost:8080/mcp")

agent = Agent(
    name="my-agent",
    instructions="You have persistent memory. Use stash tools to remember and recall.",
    mcp_servers=[memory_server],
)

# The agent can now call remember/recall tools while answering.
result = Runner.run_sync(agent, "What do you remember about my coding preferences?")
print(result.final_output)
# Check pgvector extension is available
docker exec -it stash-postgres-1 psql -U stash -c "SELECT * FROM pg_extension WHERE extname='vector';"
# If missing, install it
docker exec -it stash-postgres-1 psql -U stash -c "CREATE EXTENSION vector;"
# Verify the MCP endpoint URL is http://localhost:8080/mcp and the transport is "http"
curl http://localhost:8080/mcp
# Check logs for consolidation pipeline errors
docker compose logs stash | grep -i consolidat
# Verify LLM credentials are correct — consolidation uses the LLM to extract facts
curl $LLM_BASE_URL/models -H "Authorization: Bearer $LLM_API_KEY"
# Inspect the stored episodes for an agent
curl http://localhost:8080/api/episodes?agent_id=my-agent
# Nuclear option: wipe and restart
docker compose down -v
docker compose up
| Method | Path | Description |
|---|---|---|
| POST | /api/episodes | Store a new episode |
| POST | /api/recall | Semantic recall query |
| GET | /api/facts | List consolidated facts |
| GET/PUT | /api/context/{agent_id} | Working context |
| DELETE | /api/episodes/{id} | Forget an episode |
| POST | /consolidate | Trigger consolidation manually |
| GET | /health | Health check |
| POST | /mcp | MCP protocol endpoint |
The background consolidation pipeline runs every `CONSOLIDATION_INTERVAL` (default `5m`).