4 changes: 4 additions & 0 deletions mem0/configs/base.py
@@ -64,6 +64,10 @@ class MemoryConfig(BaseModel):
description="Custom prompt for the update memory",
default=None,
)
default_infer: bool = Field(
description="Default value for infer when not specified in API call",
default=True
)


class AzureConfig(BaseModel):
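For context, a minimal sketch (not part of the diff) of how the new field behaves, assuming `MemoryConfig`'s other fields keep their defaults; it is a plain Pydantic field, so the default applies whenever the key is omitted:

```python
from mem0.configs.base import MemoryConfig

config = MemoryConfig()
print(config.default_infer)   # True -- LLM extraction remains the default

verbatim = MemoryConfig(default_infer=False)
print(verbatim.default_infer) # False -- callers fall back to verbatim storage
```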
16 changes: 11 additions & 5 deletions openmemory/api/app/mcp_server.py
@@ -20,6 +20,7 @@
import json
import logging
import uuid
from typing import Optional

from app.database import SessionLocal
from app.models import Memory, MemoryAccessLog, MemoryState, MemoryStatusHistory
@@ -57,8 +58,8 @@ def get_memory_client_safe():
# Initialize SSE transport
sse = SseServerTransport("/mcp/messages/")

@mcp.tool(description="Add a new memory. This method is called everytime the user informs anything about themselves, their preferences, or anything that has any relevant information which can be useful in the future conversation. This can also be called when the user asks you to remember something.")
async def add_memories(text: str) -> str:
@mcp.tool(description="Add a new memory. This method is called every time the user informs anything about themselves, their preferences, or anything that has any relevant information which can be useful in the future conversation. This can also be called when the user asks you to remember something. The 'infer' parameter controls processing: True (default) = LLM extracts semantic facts and deduplicates; False = stores exact verbatim text without transformation.")
async def add_memories(text: str, infer: Optional[bool] = None) -> str:
uid = user_id_var.get(None)
client_name = client_name_var.get(None)

@@ -82,12 +83,17 @@ async def add_memories(text: str) -> str:
if not app.is_active:
return f"Error: App {app.name} is currently paused on OpenMemory. Cannot create new memories."

# Apply default from config if not specified
infer_value = infer if infer is not None else memory_client.config.default_infer

response = memory_client.add(text,
user_id=uid,
metadata={
"source_app": "openmemory",
"mcp_client": client_name,
})
"source_app": "openmemory",
"mcp_client": client_name
},
infer=infer_value
)

# Process the response and update database
if isinstance(response, dict) and 'results' in response:
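The key behavior in this hunk is the three-state fallback: an explicit `infer` from the MCP client always wins, and only an omitted value (`None`) falls back to the configured default. A standalone sketch of that rule (the helper name is illustrative, not from the diff):

```python
from typing import Optional

def resolve_infer(infer: Optional[bool], config_default: bool) -> bool:
    # Explicit True/False from the caller always wins; only None
    # (parameter omitted) falls back to the configured default.
    return infer if infer is not None else config_default

assert resolve_infer(None, True) is True    # omitted -> default_infer
assert resolve_infer(False, True) is False  # explicit False is respected
assert resolve_infer(True, False) is True   # explicit True is respected
```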
15 changes: 13 additions & 2 deletions openmemory/api/app/routers/config.py
@@ -41,6 +41,12 @@ class Mem0Config(BaseModel):
llm: Optional[LLMProvider] = None
embedder: Optional[EmbedderProvider] = None
vector_store: Optional[VectorStoreProvider] = None
default_infer: Optional[bool] = Field(
None,
description="Default value for infer parameter when not specified in API/MCP calls. "
"When True: enables LLM fact extraction and deduplication. "
"When False: stores verbatim text without transformation."
)

class ConfigSchema(BaseModel):
openmemory: Optional[OpenMemoryConfig] = None
@@ -69,7 +75,8 @@ def get_default_configuration():
"api_key": "env:OPENAI_API_KEY"
}
},
"vector_store": None
"vector_store": None,
"default_infer": True
}
}

@@ -154,7 +161,11 @@ async def update_configuration(config: ConfigSchema, db: Session = Depends(get_d

# Update mem0 settings
updated_config["mem0"] = config.mem0.dict(exclude_none=True)


# Save the configuration to database
save_config_to_db(db, updated_config)
reset_memory_client()
return updated_config

@router.patch("/", response_model=ConfigSchema)
async def patch_configuration(config_update: ConfigSchema, db: Session = Depends(get_db)):
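As a usage sketch, the new field can then be toggled through this router; the base URL below is an assumption, not something this diff pins down:

```python
import requests

CONFIG_URL = "http://localhost:8765/api/v1/config/"  # assumed mount path and port

payload = {"mem0": {"default_infer": False}}  # store verbatim text by default
resp = requests.patch(CONFIG_URL, json=payload)
resp.raise_for_status()
print(resp.json()["mem0"]["default_infer"])  # -> False
```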
15 changes: 12 additions & 3 deletions openmemory/api/app/routers/memories.py
@@ -20,7 +20,7 @@
from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi_pagination import Page, Params
from fastapi_pagination.ext.sqlalchemy import paginate as sqlalchemy_paginate
from pydantic import BaseModel
from pydantic import BaseModel, Field
from sqlalchemy import func
from sqlalchemy.orm import Session, joinedload

@@ -213,7 +219,13 @@ class CreateMemoryRequest(BaseModel):
user_id: str
text: str
metadata: dict = {}
infer: bool = True
infer: Optional[bool] = Field(
None,
description="Enable LLM processing for fact extraction and deduplication. "
"When True: content is analyzed and transformed into semantic facts. "
"When False: stores exact verbatim text without transformation. "
"When None: uses default from server configuration (default_infer)."
)
app: str = "openmemory"


@@ -254,6 +260,9 @@ async def create_memory(
"error": str(client_error)
}

# Apply default from config if not specified
infer_value = request.infer if request.infer is not None else memory_client.config.default_infer

# Try to save to Qdrant via memory_client
try:
qdrant_response = memory_client.add(
@@ -263,7 +272,7 @@
"source_app": "openmemory",
"mcp_client": request.app,
},
infer=request.infer
infer=infer_value
)

# Log the response for debugging
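A hedged example of the three request shapes the now-`Optional` field allows (the endpoint URL is an assumption):

```python
import requests

MEMORIES_URL = "http://localhost:8765/api/v1/memories/"  # assumed mount path and port
base = {"user_id": "alice", "text": "I prefer dark roast coffee", "app": "openmemory"}

requests.post(MEMORIES_URL, json=base)                      # infer omitted -> server's default_infer
requests.post(MEMORIES_URL, json={**base, "infer": True})   # force LLM fact extraction
requests.post(MEMORIES_URL, json={**base, "infer": False})  # store the exact verbatim text
```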
27 changes: 19 additions & 8 deletions openmemory/api/app/utils/memory.py
@@ -304,10 +304,11 @@ def get_memory_client(custom_instructions: str = None):
try:
# Start with default configuration
config = get_default_memory_config()
# Variable to track custom instructions

# Variables to track custom prompts and defaults from database
db_custom_instructions = None

db_default_infer = None

# Load configuration from database
try:
db = SessionLocal()
@@ -316,14 +317,19 @@
if db_config:
json_config = db_config.value

# Extract custom instructions from openmemory settings
if "openmemory" in json_config and "custom_instructions" in json_config["openmemory"]:
db_custom_instructions = json_config["openmemory"]["custom_instructions"]

# Extract custom prompts from openmemory settings
if "openmemory" in json_config:
if "custom_instructions" in json_config["openmemory"]:
db_custom_instructions = json_config["openmemory"]["custom_instructions"]

# Override defaults with configurations from the database
if "mem0" in json_config:
mem0_config = json_config["mem0"]


# Extract default flags from mem0 config
if "default_infer" in mem0_config:
db_default_infer = mem0_config["default_infer"]

# Update LLM configuration if available
if "llm" in mem0_config and mem0_config["llm"] is not None:
config["llm"] = mem0_config["llm"]
@@ -357,6 +363,11 @@
if instructions_to_use:
config["custom_fact_extraction_prompt"] = instructions_to_use

# Use database value for default_infer
# Note: Must use 'is not None' check to properly handle False value
if db_default_infer is not None:
config["default_infer"] = db_default_infer

# ALWAYS parse environment variables in the final config
# This ensures that even default config values like "env:OPENAI_API_KEY" get parsed
print("Parsing environment variables in final config...")
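The `is not None` comment is the subtle part of this hunk: `default_infer=False` is a deliberate setting and must not be discarded by a truthiness check. A condensed sketch of the override (the helper name is illustrative):

```python
def apply_db_overrides(config: dict, json_config: dict) -> dict:
    db_default_infer = json_config.get("mem0", {}).get("default_infer")
    if db_default_infer is not None:  # 'if db_default_infer:' would silently drop False
        config["default_infer"] = db_default_infer
    return config

assert apply_db_overrides({"default_infer": True},
                          {"mem0": {"default_infer": False}}) == {"default_infer": False}
assert apply_db_overrides({"default_infer": True},
                          {"mem0": {}}) == {"default_infer": True}
```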
33 changes: 33 additions & 0 deletions openmemory/ui/components/form-view.tsx
@@ -97,6 +97,16 @@ export function FormView({ settings, onChange }: FormViewProps) {
})
}

const handleMem0ConfigChange = (key: string, value: any) => {
onChange({
...settings,
mem0: {
...settings.mem0,
[key]: value,
},
})
}

const needsLlmApiKey = settings.mem0?.llm?.provider?.toLowerCase() !== "ollama"
const needsEmbedderApiKey = settings.mem0?.embedder?.provider?.toLowerCase() !== "ollama"
const isLlmOllama = settings.mem0?.llm?.provider?.toLowerCase() === "ollama"
@@ -352,6 +362,29 @@ export function FormView({ settings, onChange }: FormViewProps) {
</CardContent>
</Card>

{/* Default Memory Processing Settings */}
<Card>
<CardHeader>
<CardTitle>Default Memory Processing Settings</CardTitle>
<CardDescription>Configure default behavior for memory operations</CardDescription>
</CardHeader>
<CardContent className="space-y-6">
<div className="flex items-center justify-between">
<div className="space-y-0.5 flex-1">
<Label htmlFor="default-infer">Default Infer</Label>
<p className="text-xs text-muted-foreground">
Enable LLM processing (extraction & deduplication) by default
</p>
</div>
<Switch
id="default-infer"
checked={settings.mem0?.default_infer !== false}
onCheckedChange={(checked) => handleMem0ConfigChange("default_infer", checked)}
/>
</div>
</CardContent>
</Card>

{/* Backup (Export / Import) */}
<Card>
<CardHeader>
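One design note on the switch above: `checked={settings.mem0?.default_infer !== false}` deliberately treats an unset value (`undefined`) as on, so the UI matches the server-side default of `True` until the user explicitly disables inference.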