- Update workspace state timestamp
- Add weekly summary to MEMORY.md (removing duplicate entry)
255 lines · 8.8 KiB · Python
#!/usr/bin/env python3
|
|
"""
|
|
Memory Manager - Consolidates daily notes into MEMORY.md and archives old files.
|
|
|
|
Usage:
|
|
python consolidate_memory.py [--dry-run] [--force] [--week-start YYYY-MM-DD]
|
|
|
|
Options:
|
|
--dry-run Show what would be done without making changes
|
|
--force Skip confirmation prompts
|
|
--week-start Date to start the week (default: last Sunday)
|
|
"""
|
|
|
|
import argparse
import json
import logging
import os
import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import List, Optional, Tuple
# Configure logging
|
|
logging.basicConfig(
|
|
level=logging.INFO,
|
|
format='%(asctime)s [%(levelname)s] %(message)s',
|
|
datefmt='%Y-%m-%d %H:%M:%S'
|
|
)
|
|
logger = logging.getLogger(__name__)
|
|
|
|
def get_memory_dir() -> Path:
    """Resolve the memory directory (``MEMORY_DIR`` env var, default ``./memory``).

    The directory is created on demand, so callers always receive an
    existing, absolute path.
    """
    root = Path(os.getenv('MEMORY_DIR', './memory')).resolve()
    if not root.exists():
        logger.warning(f"Memory directory {root} does not exist, creating...")
        root.mkdir(parents=True, exist_ok=True)
    return root
def get_archive_dir() -> Path:
    """Return the archive directory (``MEMORY_ARCHIVE`` env var, default ``memory/archive``).

    The directory is created if missing.  Unlike get_memory_dir(), the
    returned path is NOT resolved to an absolute path — preserved behavior.
    """
    target = Path(os.getenv('MEMORY_ARCHIVE', 'memory/archive'))
    target.mkdir(parents=True, exist_ok=True)
    return target
def get_max_age_days() -> int:
    """Return the archival age threshold in days (``MAX_AGE_DAYS`` env var).

    Falls back to 30 when the variable is unset or not a valid integer.
    """
    raw = os.getenv('MAX_AGE_DAYS', '30')
    try:
        return int(raw)
    except ValueError:
        return 30
def list_daily_files(memory_dir: Path) -> List[Path]:
    """Return all daily memory files (named ``YYYY-MM-DD.md``), sorted by name.

    Any ``*.md`` file whose stem does not parse as a ``%Y-%m-%d`` date is
    skipped.  ``strptime`` is the single source of truth here: the old
    ``count('-') == 2`` and ``endswith('.md')`` checks were redundant
    (the glob already guarantees the suffix, and no string parses as
    ``%Y-%m-%d`` without exactly two dashes).
    """
    files = []
    for candidate in memory_dir.glob('*.md'):
        try:
            # Validate the stem is a real calendar date, not just date-shaped.
            datetime.strptime(candidate.stem, '%Y-%m-%d')
        except ValueError:
            continue  # Not a date file
        files.append(candidate)
    return sorted(files)
def read_file(path: Path) -> str:
    """Best-effort read: return the UTF-8 text of *path*, or "" on any error.

    Failures are logged rather than raised so callers can keep going.
    """
    content = ""
    try:
        content = path.read_text(encoding='utf-8')
    except Exception as exc:
        logger.error(f"Failed to read {path}: {exc}")
    return content
def write_file(path: Path, content: str) -> bool:
    """Best-effort write of *content* to *path* as UTF-8.

    Returns True on success, False on failure (error is logged, not raised).
    """
    try:
        path.write_text(content, encoding='utf-8')
    except Exception as exc:
        logger.error(f"Failed to write {path}: {exc}")
        return False
    return True
def extract_insights(content: str, max_items: int = 20) -> List[str]:
    """
    Extract important insights from daily memory content.

    Scans line by line for bullet points and decision markers, strips the
    marker, and keeps entries longer than 10 characters, stopping once
    *max_items* have been collected.

    Fix: the old guard ``startswith('#') or startswith('##')`` had an
    unreachable second clause — ``startswith('#')`` already matches every
    header level — so the dead test is removed.
    """
    markers = ('-', '*', '•', '→', '✓', '✗', '✅', '❌', '📌', '💡')
    insights: List[str] = []

    for line in content.split('\n'):
        stripped = line.strip()
        # Skip blanks and markdown headers of any level.
        if not stripped or stripped.startswith('#'):
            continue
        if not stripped.startswith(markers):
            continue
        # NOTE: lstrip removes ANY leading run of marker characters (and
        # spaces) — so an insight that itself starts with e.g. '*' loses
        # it too.  Preserved from the original behavior.
        insight = stripped.lstrip('- *•→✓✗✅❌📌💡').strip()
        if insight and len(insight) > 10:  # Minimum length
            insights.append(insight)
            if len(insights) >= max_items:
                break

    return insights
def consolidate_week(memory_dir: Path, week_start: datetime, week_end: datetime) -> Tuple[List[str], List[Path]]:
    """
    Consolidate daily files for a given week.

    A file belongs to the week when ``week_start <= date < week_end``
    (half-open interval).  Files that read back empty are skipped and not
    counted as processed.

    Returns (insights_list, processed_files).
    """
    collected: List[str] = []
    touched: List[Path] = []

    for daily in list_daily_files(memory_dir):
        try:
            day = datetime.strptime(daily.stem, '%Y-%m-%d')
        except ValueError:
            continue
        if not (week_start <= day < week_end):
            continue
        text = read_file(daily)
        if text:
            collected.extend(extract_insights(text))
            touched.append(daily)

    return collected, touched
def update_memory_file(memory_path: Path, week_label: str, insights: List[str]) -> bool:
    """Append a weekly summary section to MEMORY.md.

    Creates the file with a single ``# Memory`` header when it does not
    exist yet.  Returns True on a successful write.

    Bug fix: the old code seeded a brand-new file with ``## {week_label}``
    and then appended the very same heading again below, producing a
    duplicate weekly section header.  A new file now gets only the
    top-level header; the weekly heading is added exactly once.
    """
    if not memory_path.exists():
        # New file: top-level header only — the weekly heading is appended below.
        content = "# Memory\n"
    else:
        content = read_file(memory_path)
        # Ensure trailing newline so the new section starts on its own line.
        if not content.endswith('\n'):
            content += '\n'

    # Build the weekly section.
    section = f"\n## {week_label}\n\n"
    if insights:
        for insight in insights[:30]:  # Limit to top 30
            section += f"- {insight}\n"
    else:
        section += "*No notable insights this week.*\n"
    section += "\n"

    return write_file(memory_path, content + section)
def archive_old_files(memory_dir: Path, archive_dir: Path, max_age_days: int, dry_run: bool) -> int:
    """
    Move daily files older than *max_age_days* to the archive, gzip-compressed.

    Returns the count of archived files (in dry-run mode: the count that
    WOULD be archived — nothing is written or deleted).

    Fixes: ``import gzip`` used to sit inside the per-file loop and was
    re-executed every iteration; the copy used ``writelines(src)``, which
    iterates a binary stream in newline-delimited chunks — replaced with a
    single read/write (daily notes are small, so no streaming is needed).
    """
    import gzip  # function-scoped like the original, but hoisted out of the loop

    cutoff = datetime.now() - timedelta(days=max_age_days)
    archived = 0

    for daily in list_daily_files(memory_dir):
        try:
            file_date = datetime.strptime(daily.stem, '%Y-%m-%d')
        except ValueError:
            continue
        if file_date >= cutoff:
            continue
        logger.info(f"Archiving {daily.name} (older than {max_age_days} days)")
        if not dry_run:
            target = archive_dir / f"{daily.stem}.md.gz"
            # Compress and move: whole-file copy, then remove the original.
            with daily.open('rb') as src, gzip.open(target, 'wb') as dst:
                dst.write(src.read())
            daily.unlink()
            logger.debug(f"Archived to {target}")
        archived += 1

    return archived
def get_last_sunday(ref_date: Optional[datetime] = None) -> datetime:
    """Return the most recent Sunday on or before *ref_date* (default: now).

    If *ref_date* is itself a Sunday it is returned unchanged; any time
    component is preserved.

    Fix: the annotation is now ``Optional[datetime]`` — the old
    ``ref_date: datetime = None`` contradicted its own default value.
    """
    if ref_date is None:
        ref_date = datetime.now()
    # weekday(): Monday == 0 ... Sunday == 6, so Sunday maps to offset 0.
    days_since_sunday = (ref_date.weekday() + 1) % 7
    return ref_date - timedelta(days=days_since_sunday)
def main():
    """CLI entry point: consolidate the week's daily notes, then archive old files.

    Returns a process exit code: 0 on success, 1 on a bad --week-start date
    or a failed MEMORY.md update.

    Fix: removed the dead pre-assignment of ``memory_file`` — the fallback
    loop below always overwrites it (its final candidate ``./MEMORY.md``
    has an existing parent, so the loop always breaks with a value).
    """
    parser = argparse.ArgumentParser(description='Memory manager: consolidate and archive old memory files.')
    parser.add_argument('--dry-run', action='store_true', help='Show actions without performing them')
    # NOTE(review): --force is parsed but never consulted below — confirm intent.
    parser.add_argument('--force', action='store_true', help='Skip confirmation prompts')
    parser.add_argument('--week-start', type=str, help='Week start date (YYYY-MM-DD)')
    args = parser.parse_args()

    memory_dir = get_memory_dir()
    archive_dir = get_archive_dir()
    max_age_days = get_max_age_days()

    # Determine week range: explicit --week-start wins, else last Sunday.
    if args.week_start:
        try:
            week_start = datetime.strptime(args.week_start, '%Y-%m-%d')
        except ValueError:
            logger.error(f"Invalid date format: {args.week_start}")
            return 1
    else:
        week_start = get_last_sunday()

    week_end = week_start + timedelta(days=7)
    week_label = f"Weekly Summary ({week_start.strftime('%Y-%m-%d')} to {week_end.strftime('%Y-%m-%d')})"

    logger.info("Memory manager starting")
    logger.info(f"Memory dir: {memory_dir}")
    logger.info(f"Archive dir: {archive_dir}")
    logger.info(f"Week: {week_label}")
    logger.info(f"Max age for archival: {max_age_days} days")
    if args.dry_run:
        logger.info("DRY RUN - No changes will be made")

    # 1. Consolidate week
    insights, processed_files = consolidate_week(memory_dir, week_start, week_end)
    logger.info(f"Found {len(insights)} insights from {len(processed_files)} daily files")

    if insights:
        if not args.dry_run:
            # Pick the MEMORY.md location: first candidate whose file (or at
            # least whose parent directory) already exists.  The final
            # './MEMORY.md' candidate always matches, so memory_file is
            # guaranteed to be set by this loop.
            possible_paths = [
                memory_dir.parent / 'MEMORY.md',
                Path('/root/.openclaw/workspace/MEMORY.md'),
                Path('./MEMORY.md')
            ]
            for mem_path in possible_paths:
                if mem_path.exists() or mem_path.parent.exists():
                    memory_file = mem_path
                    break

            logger.info(f"Updating MEMORY.md at {memory_file}")
            if update_memory_file(memory_file, week_label, insights):
                logger.info("Weekly summary added to MEMORY.md")
            else:
                logger.error("Failed to update MEMORY.md")
                return 1
    else:
        logger.info("No insights to consolidate")

    # 2. Archive old files
    archived = archive_old_files(memory_dir, archive_dir, max_age_days, args.dry_run)
    logger.info(f"Archived {archived} old files")

    logger.info("Memory manager completed successfully")
    return 0


if __name__ == '__main__':
    sys.exit(main())