
Commit 2585906

NTLx and co-authors authored
feat: implement memory compression and deduplication (issue #141) (#216)
* feat: implement memory compression and deduplication (issue #141)

* fix: resolve mock vector store issue in test_memory_optimize.py
  - Use Mock(spec=Memory) instead of instantiating Memory, to avoid vector store initialization that requires a real provider configuration
  - This fixes the ValueError: Unsupported VectorStore provider: mock

  Closes #216

* build: drop Python 3.10 support to fix the pyseekdb dependency conflict

* chore: remove unnecessary files per maintainer review
  - Remove docs/plans/2026-01-31-memory-compression-deduplication.md (AI-generated descriptive file, not needed in a formal PR)
  - Remove uv.lock (the project uses a plain venv workflow; uv.lock adds no value and causes confusion)

Reviewed-by: Teingi

---------

Co-authored-by: NTLx <lx@openclaw.ai>
1 parent 492ecf7 commit 2585906
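The mock fix described in the commit message uses the standard unittest.mock spec pattern. The snippet below is a minimal sketch of that approach, assuming the src layout exposes the class as powermem.core.memory.Memory; the test function body and asserted values are illustrative only, not the contents of the real test_memory_optimize.py:

# Sketch of the Mock(spec=Memory) pattern referenced in the fix bullet above.
# Assumption: powermem.core.memory.Memory is the importable path for
# src/powermem/core/memory.py; the test body is a hypothetical illustration.
from unittest.mock import Mock

from powermem.core.memory import Memory


def test_optimize_dispatch_without_vector_store():
    # Mock(spec=Memory) mimics Memory's public interface but never runs
    # __init__, so no vector store provider (real or "mock") is initialized.
    memory = Mock(spec=Memory)
    memory.optimize.return_value = {"duplicates_removed": 0}

    stats = memory.optimize(strategy="deduplicate", threshold=0.95)

    memory.optimize.assert_called_once_with(strategy="deduplicate", threshold=0.95)
    assert stats == {"duplicates_removed": 0}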

File tree

7 files changed: +555 −3 lines


.github/workflows/test.yml

Lines changed: 2 additions & 2 deletions

@@ -26,7 +26,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.10", "3.11", "3.12"]
+        python-version: ["3.11", "3.12"]
       fail-fast: false

     steps:
@@ -64,7 +64,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.10", "3.11", "3.12"]
+        python-version: ["3.11", "3.12"]
       fail-fast: false

     steps:

pyproject.toml

Lines changed: 1 addition & 1 deletion

@@ -22,7 +22,7 @@ classifiers = [
     "Topic :: Software Development :: Libraries :: Python Modules",
     "Topic :: Scientific/Engineering :: Artificial Intelligence",
 ]
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 dependencies = [
     "pydantic>=2.0.0",
     "pydantic-settings>=2.0.0",

src/powermem/core/memory.py

Lines changed: 31 additions & 0 deletions

@@ -26,6 +26,7 @@
 from ..integrations.rerank.factory import RerankFactory
 from .telemetry import TelemetryManager
 from .audit import AuditLogger
+from ..intelligence.memory_optimizer import MemoryOptimizer
 from ..intelligence.plugin import IntelligentMemoryPlugin, EbbinghausIntelligencePlugin
 from ..utils.utils import remove_code_blocks, convert_config_object_to_dict, parse_vision_messages, set_timezone
 from ..prompts.intelligent_memory_prompts import (
@@ -309,6 +310,9 @@ def __init__(
         audit_config = self.config
         self.audit = AuditLogger(audit_config)

+        # Initialize memory optimizer
+        self.optimizer = MemoryOptimizer(self.storage, self.llm)
+
         # Save custom prompts from config
         if self.memory_config:
             self.custom_fact_extraction_prompt = self.memory_config.custom_fact_extraction_prompt
@@ -1586,6 +1590,33 @@ def get_all(
             logger.error(f"Failed to get all memories: {e}")
             raise

+    def optimize(self, strategy: str = "deduplicate", **kwargs) -> Dict[str, Any]:
+        """
+        Optimize memory storage.
+
+        Args:
+            strategy: "deduplicate" or "compress"
+            **kwargs: Additional args like threshold, user_id, dedup_strategy
+
+        Returns:
+            Optimization stats
+        """
+        if strategy == "deduplicate":
+            # Extract specific args
+            sub_strategy = kwargs.get("dedup_strategy", "exact")
+            return self.optimizer.deduplicate(
+                user_id=kwargs.get("user_id"),
+                strategy=sub_strategy,
+                threshold=kwargs.get("threshold", 0.95)
+            )
+        elif strategy == "compress":
+            return self.optimizer.compress(
+                user_id=kwargs.get("user_id"),
+                threshold=kwargs.get("threshold", 0.85)
+            )
+        else:
+            raise ValueError(f"Unknown optimization strategy: {strategy}")
+
     def get_statistics(
         self, user_id: Optional[str] = None, agent_id: Optional[str] = None
     ) -> Dict[str, Any]:
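For context on the new entry point, here is a minimal usage sketch of Memory.optimize(). Only the optimize() signature and its recognized kwargs (strategy, dedup_strategy, threshold, user_id) come from the diff above; the no-argument Memory() construction, the user id, and the shape of the returned stats dicts are assumptions for illustration:

# Usage sketch for the optimize() method added in this commit.
# Assumptions: Memory() works with a default/local configuration, and the
# returned stats are printable dicts; the user id is hypothetical.
from powermem.core.memory import Memory

memory = Memory()

# Exact-match deduplication for one user; threshold defaults to 0.95.
dedup_stats = memory.optimize(
    strategy="deduplicate",
    dedup_strategy="exact",
    threshold=0.95,
    user_id="user-123",
)
print(dedup_stats)

# Compression pass; threshold defaults to 0.85 for this strategy.
compress_stats = memory.optimize(strategy="compress", user_id="user-123")
print(compress_stats)

# Any other strategy raises ValueError, per the final else branch.
try:
    memory.optimize(strategy="merge")
except ValueError as err:
    print(err)  # Unknown optimization strategy: merge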

0 commit comments
