
Commit 5c8134c

Add configurable default_infer and custom update prompt
- Add default_infer setting to control the default behavior when the infer parameter is not specified in API/MCP calls
- Add custom_update_memory_prompt setting for the deduplication phase
- Update config schema with detailed parameter descriptions for LLM agents
- Apply config defaults in the REST API and MCP endpoints using the Optional[bool] pattern
- Add UI controls on the Settings page for both configuration options
- Load config values from the database and pass them to mem0 core
1 parent: 9ef644b

File tree (6 files changed: +116 -21 lines)

- mem0/configs/base.py
- openmemory/api/app/mcp_server.py
- openmemory/api/app/routers/config.py
- openmemory/api/app/routers/memories.py
- openmemory/api/app/utils/memory.py
- openmemory/ui/components/form-view.tsx

mem0/configs/base.py

Lines changed: 4 additions & 0 deletions

@@ -64,6 +64,10 @@ class MemoryConfig(BaseModel):
         description="Custom prompt for the update memory",
         default=None,
     )
+    default_infer: bool = Field(
+        description="Default value for infer when not specified in API call",
+        default=True
+    )


 class AzureConfig(BaseModel):
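
The new field behaves like any other pydantic field on MemoryConfig. A minimal sketch (illustrative only, not part of this commit) assuming MemoryConfig as defined above is importable and its other fields keep their existing defaults:

    # Sketch: default_infer falls back to True unless the operator overrides it.
    from mem0.configs.base import MemoryConfig

    config = MemoryConfig()
    assert config.default_infer is True            # unset -> LLM inference stays on

    config = MemoryConfig(default_infer=False)     # server-wide opt-out of inference
    assert config.default_infer is False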

openmemory/api/app/mcp_server.py

Lines changed: 8 additions & 3 deletions

@@ -20,6 +20,7 @@
 import json
 import logging
 import uuid
+from typing import Optional

 from app.database import SessionLocal
 from app.models import Memory, MemoryAccessLog, MemoryState, MemoryStatusHistory
@@ -57,8 +58,8 @@ def get_memory_client_safe():
 # Initialize SSE transport
 sse = SseServerTransport("/mcp/messages/")

-@mcp.tool(description="Add a new memory. This method is called everytime the user informs anything about themselves, their preferences, or anything that has any relevant information which can be useful in the future conversation. This can also be called when the user asks you to remember something.")
-async def add_memories(text: str) -> str:
+@mcp.tool(description="Add a new memory. This method is called everytime the user informs anything about themselves, their preferences, or anything that has any relevant information which can be useful in the future conversation. This can also be called when the user asks you to remember something. The 'infer' parameter controls processing: True (default) = LLM extracts semantic facts and deduplicates; False = stores exact verbatim text without transformation.")
+async def add_memories(text: str, infer: Optional[bool] = None) -> str:
     uid = user_id_var.get(None)
     client_name = client_name_var.get(None)

@@ -82,12 +83,16 @@ async def add_memories(text: str) -> str:
     if not app.is_active:
         return f"Error: App {app.name} is currently paused on OpenMemory. Cannot create new memories."

+    # Apply default from config if not specified
+    infer_value = infer if infer is not None else memory_client.config.default_infer
+
     response = memory_client.add(text,
                                  user_id=uid,
                                  metadata={
                                      "source_app": "openmemory",
                                      "mcp_client": client_name,
-                                 })
+                                 },
+                                 infer=infer_value)

     # Process the response and update database
     if isinstance(response, dict) and 'results' in response:
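
The Optional[bool] resolution used above (and again in the REST router below) is worth spelling out: an explicit True or False from the caller always wins, and only an omitted value (None) falls back to the configured default. A standalone sketch — resolve_infer is a hypothetical helper named here purely for illustration:

    from typing import Optional

    def resolve_infer(infer: Optional[bool], default_infer: bool) -> bool:
        # Explicit caller choice wins; None means "use the server default".
        return infer if infer is not None else default_infer

    assert resolve_infer(None, default_infer=False) is False   # config default applies
    assert resolve_infer(True, default_infer=False) is True    # explicit True overrides
    assert resolve_infer(False, default_infer=True) is False   # False is a choice, not "unset"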

openmemory/api/app/routers/config.py

Lines changed: 12 additions & 3 deletions

@@ -30,11 +30,18 @@ class EmbedderProvider(BaseModel):
     config: EmbedderConfig

 class OpenMemoryConfig(BaseModel):
-    custom_instructions: Optional[str] = Field(None, description="Custom instructions for memory management and fact extraction")
+    custom_instructions: Optional[str] = Field(None, description="Custom prompt for fact extraction phase. Overrides default prompt used to extract semantic facts from input text.")
+    custom_update_memory_prompt: Optional[str] = Field(None, description="Custom prompt for deduplication/update phase. Overrides default prompt used to determine ADD/UPDATE/DELETE/NONE decisions when comparing with existing memories.")

 class Mem0Config(BaseModel):
     llm: Optional[LLMProvider] = None
     embedder: Optional[EmbedderProvider] = None
+    default_infer: Optional[bool] = Field(
+        None,
+        description="Default value for infer parameter when not specified in API/MCP calls. "
+                    "When True: enables LLM fact extraction and deduplication. "
+                    "When False: stores verbatim text without transformation."
+    )

 class ConfigSchema(BaseModel):
     openmemory: Optional[OpenMemoryConfig] = None
@@ -44,7 +51,8 @@ def get_default_configuration():
     """Get the default configuration with sensible defaults for LLM and embedder."""
     return {
         "openmemory": {
-            "custom_instructions": None
+            "custom_instructions": None,
+            "custom_update_memory_prompt": None
         },
         "mem0": {
             "llm": {
@@ -62,7 +70,8 @@ def get_default_configuration():
                 "model": "text-embedding-3-small",
                 "api_key": "env:OPENAI_API_KEY"
             }
-        }
+        },
+        "default_infer": True
     }
 }
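
A hedged sketch of what a stored configuration document can now look like once these schema fields exist. The import path mirrors the router module shown above (assumed importable as app.routers.config); the prompt text and the choice to validate through ConfigSchema are illustrative, not prescribed by the commit:

    # Sketch only; assumes the Pydantic models above are importable from this path.
    from app.routers.config import ConfigSchema

    payload = {
        "openmemory": {
            "custom_instructions": None,
            "custom_update_memory_prompt": "When comparing facts, prefer UPDATE over ADD ...",
        },
        "mem0": {
            "default_infer": False,   # store verbatim text unless a call passes infer=True
        },
    }

    parsed = ConfigSchema(**payload)
    assert parsed.mem0.default_infer is False
    assert parsed.openmemory.custom_update_memory_prompt.startswith("When comparing")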

openmemory/api/app/routers/memories.py

Lines changed: 13 additions & 3 deletions

@@ -20,7 +20,7 @@
 from fastapi import APIRouter, Depends, HTTPException, Query
 from fastapi_pagination import Page, Params
 from fastapi_pagination.ext.sqlalchemy import paginate as sqlalchemy_paginate
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from sqlalchemy import func
 from sqlalchemy.orm import Session, joinedload

@@ -203,7 +203,13 @@ class CreateMemoryRequest(BaseModel):
     user_id: str
     text: str
     metadata: dict = {}
-    infer: bool = True
+    infer: Optional[bool] = Field(
+        None,
+        description="Enable LLM processing for fact extraction and deduplication. "
+                    "When True: content is analyzed and transformed into semantic facts. "
+                    "When False: stores exact verbatim text without transformation. "
+                    "When None: uses default from server configuration (default_infer)."
+    )
     app: str = "openmemory"


@@ -244,6 +250,9 @@ async def create_memory(
             "error": str(client_error)
         }

+    # Apply default from config if not specified
+    infer_value = request.infer if request.infer is not None else memory_client.config.default_infer
+
     # Try to save to Qdrant via memory_client
     try:
         qdrant_response = memory_client.add(
@@ -252,7 +261,8 @@ async def create_memory(
             metadata={
                 "source_app": "openmemory",
                 "mcp_client": request.app,
-            }
+            },
+            infer=infer_value
         )

         # Log the response for debugging
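
As a usage illustration, the three states of infer map to three different requests against the create-memory endpoint. The URL below is an assumption (adjust host, port, and route prefix to your deployment); the body fields are those of CreateMemoryRequest above:

    import requests

    BASE = "http://localhost:8765"   # assumed OpenMemory API address; adjust as needed

    payload = {
        "user_id": "alice",
        "text": "Remember that my staging DB password rotates every Friday.",
        "app": "openmemory",
        # "infer" omitted -> server falls back to its configured default_infer
    }
    requests.post(f"{BASE}/api/v1/memories/", json=payload)

    # Explicit values always win over the server default:
    requests.post(f"{BASE}/api/v1/memories/", json={**payload, "infer": False})  # store verbatim
    requests.post(f"{BASE}/api/v1/memories/", json={**payload, "infer": True})   # extract + dedupe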

openmemory/api/app/utils/memory.py

Lines changed: 28 additions & 8 deletions

@@ -304,10 +304,12 @@ def get_memory_client(custom_instructions: str = None):
     try:
         # Start with default configuration
         config = get_default_memory_config()
-
-        # Variable to track custom instructions
+
+        # Variables to track custom prompts and defaults from database
         db_custom_instructions = None
-
+        db_custom_update_memory_prompt = None
+        db_default_infer = None
+
         # Load configuration from database
         try:
             db = SessionLocal()
@@ -316,14 +318,21 @@ def get_memory_client(custom_instructions: str = None):
             if db_config:
                 json_config = db_config.value

-                # Extract custom instructions from openmemory settings
-                if "openmemory" in json_config and "custom_instructions" in json_config["openmemory"]:
-                    db_custom_instructions = json_config["openmemory"]["custom_instructions"]
-
+                # Extract custom prompts from openmemory settings
+                if "openmemory" in json_config:
+                    if "custom_instructions" in json_config["openmemory"]:
+                        db_custom_instructions = json_config["openmemory"]["custom_instructions"]
+                    if "custom_update_memory_prompt" in json_config["openmemory"]:
+                        db_custom_update_memory_prompt = json_config["openmemory"]["custom_update_memory_prompt"]
+
                 # Override defaults with configurations from the database
                 if "mem0" in json_config:
                     mem0_config = json_config["mem0"]
-
+
+                    # Extract default flags from mem0 config
+                    if "default_infer" in mem0_config:
+                        db_default_infer = mem0_config["default_infer"]
+
                     # Update LLM configuration if available
                     if "llm" in mem0_config and mem0_config["llm"] is not None:
                         config["llm"] = mem0_config["llm"]
@@ -357,6 +366,17 @@ def get_memory_client(custom_instructions: str = None):
         if instructions_to_use:
             config["custom_fact_extraction_prompt"] = instructions_to_use

+        # Use database value for custom update memory prompt
+        # Note: Empty string and None both mean "use mem0's hardcoded default prompt"
+        update_prompt_to_use = db_custom_update_memory_prompt
+        if update_prompt_to_use:
+            config["custom_update_memory_prompt"] = update_prompt_to_use
+
+        # Use database value for default_infer
+        # Note: Must use 'is not None' check to properly handle False value
+        if db_default_infer is not None:
+            config["default_infer"] = db_default_infer
+
         # ALWAYS parse environment variables in the final config
         # This ensures that even default config values like "env:OPENAI_API_KEY" get parsed
         print("Parsing environment variables in final config...")

openmemory/ui/components/form-view.tsx

Lines changed: 51 additions & 4 deletions

@@ -97,6 +97,16 @@ export function FormView({ settings, onChange }: FormViewProps) {
     })
   }

+  const handleMem0ConfigChange = (key: string, value: any) => {
+    onChange({
+      ...settings,
+      mem0: {
+        ...settings.mem0,
+        [key]: value,
+      },
+    })
+  }
+
   const needsLlmApiKey = settings.mem0?.llm?.provider?.toLowerCase() !== "ollama"
   const needsEmbedderApiKey = settings.mem0?.embedder?.provider?.toLowerCase() !== "ollama"
   const isLlmOllama = settings.mem0?.llm?.provider?.toLowerCase() === "ollama"
@@ -143,16 +153,30 @@ export function FormView({ settings, onChange }: FormViewProps) {
       </CardHeader>
       <CardContent className="space-y-6">
         <div className="space-y-2">
-          <Label htmlFor="custom-instructions">Custom Instructions</Label>
+          <Label htmlFor="custom-instructions">Custom Fact Extraction Prompt</Label>
           <Textarea
             id="custom-instructions"
-            placeholder="Enter custom instructions for memory management..."
+            placeholder="Enter custom prompt for fact extraction phase..."
             value={settings.openmemory?.custom_instructions || ""}
             onChange={(e) => handleOpenMemoryChange("custom_instructions", e.target.value)}
-            className="min-h-[100px]"
+            className="min-h-[120px]"
           />
           <p className="text-xs text-muted-foreground mt-1">
-            Custom instructions that will be used to guide memory processing and fact extraction.
+            Custom prompt used during fact extraction phase. Leave empty to use default prompt.
+          </p>
+        </div>
+
+        <div className="space-y-2">
+          <Label htmlFor="custom-update-memory-prompt">Custom Update Memory Prompt</Label>
+          <Textarea
+            id="custom-update-memory-prompt"
+            placeholder="Enter custom prompt for deduplication/update phase..."
+            value={settings.openmemory?.custom_update_memory_prompt || ""}
+            onChange={(e) => handleOpenMemoryChange("custom_update_memory_prompt", e.target.value)}
+            className="min-h-[120px]"
+          />
+          <p className="text-xs text-muted-foreground mt-1">
+            Custom prompt used during deduplication phase (ADD/UPDATE/DELETE/NONE decisions). Leave empty to use default prompt.
           </p>
         </div>
       </CardContent>
@@ -352,6 +376,29 @@ export function FormView({ settings, onChange }: FormViewProps) {
         </CardContent>
       </Card>

+      {/* Default Memory Processing Settings */}
+      <Card>
+        <CardHeader>
+          <CardTitle>Default Memory Processing Settings</CardTitle>
+          <CardDescription>Configure default behavior for memory operations</CardDescription>
+        </CardHeader>
+        <CardContent className="space-y-6">
+          <div className="flex items-center justify-between">
+            <div className="space-y-0.5 flex-1">
+              <Label htmlFor="default-infer">Default Infer</Label>
+              <p className="text-xs text-muted-foreground">
+                Enable LLM processing (extraction & deduplication) by default
+              </p>
+            </div>
+            <Switch
+              id="default-infer"
+              checked={settings.mem0?.default_infer !== false}
+              onCheckedChange={(checked) => handleMem0ConfigChange("default_infer", checked)}
+            />
+          </div>
+        </CardContent>
+      </Card>
+
       {/* Backup (Export / Import) */}
       <Card>
         <CardHeader>
