think-bigger/test_phase_1.py
Kade Heyborne 5ede9e2e7e
Initial commit: Phase 1 Dual Manifold Cognitive Architecture
- Complete dual manifold memory system (episodic, semantic, persona layers)
- Braiding engine with structural gates for optimal learning suggestions
- FastAPI backend with comprehensive REST endpoints
- Domain directory templates and configuration system
- Comprehensive planning documentation for 5-phase development
- Integrated UI design specifications for Phase 2
- Fixed linting issues and code quality standards
2025-12-03 16:54:29 -07:00

167 lines
6.5 KiB
Python

#!/usr/bin/env python3
"""Test script for Phase 1 Dual Manifold Cognitive Architecture implementation."""
import asyncio
import os
from pathlib import Path
# Set up data directory
data_dir = Path.home() / "think_bigger_test_data"
data_dir.mkdir(exist_ok=True)
async def test_phase_1_implementation():
"""Test the complete Phase 1 implementation."""
print("🧠 Testing Phase 1: Dual Manifold Cognitive Architecture")
print("=" * 60)
try:
# Import required modules
from sentence_transformers import SentenceTransformer
from think_bigger.core.memory.episodic.episodic_memory import EpisodicMemory
from think_bigger.core.memory.semantic.semantic_distiller import (
SemanticDistiller,
)
from think_bigger.core.memory.persona.persona_graph import PersonaGraph
from think_bigger.core.memory.manifolds.dual_manifold import DualManifoldEngine
from think_bigger.core.memory.braiding.braiding_engine import BraidingEngine
print("✅ Imports successful")
# Initialize embedding model
print("🔄 Initializing embedding model...")
embedding_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
print("✅ Embedding model ready")
# Initialize memory systems
print("🔄 Initializing memory systems...")
# Episodic Memory
episodic_memory = EpisodicMemory(str(data_dir / "episodic"))
print("✅ Episodic memory initialized")
# Semantic Distiller (using mock API key for testing)
semantic_distiller = SemanticDistiller(
str(data_dir / "semantic"), "mock-api-key"
)
print("✅ Semantic distiller initialized")
# Persona Graph
persona_graph = PersonaGraph(str(data_dir / "persona"))
print("✅ Persona graph initialized")
# Dual Manifold Engine
dual_manifold_engine = DualManifoldEngine(
str(data_dir / "manifolds"), embedding_model, "mock-api-key"
)
print("✅ Dual manifold engine initialized")
# Braiding Engine
braiding_engine = BraidingEngine(
str(data_dir / "braiding"), dual_manifold_engine, "mock-api-key"
)
print("✅ Braiding engine initialized")
print("\n📝 Testing memory entry addition...")
# Add some test memory entries
test_entries = [
{
"content": "The dual manifold cognitive architecture combines individual and collective knowledge representations through manifold learning techniques.",
"source_file": "architecture_notes.md",
"metadata": {"topic": "architecture", "importance": "high"},
},
{
"content": "Episodic memory stores specific experiences with temporal preservation, using hybrid indexing with FAISS and BM25.",
"source_file": "memory_system.md",
"metadata": {"topic": "memory", "component": "episodic"},
},
{
"content": "Semantic distillation extracts cognitive trajectories from memory entries using LLM analysis to identify meaningful concepts and their relationships.",
"source_file": "semantic_layer.md",
"metadata": {"topic": "memory", "component": "semantic"},
},
{
"content": "Knowledge graphs in the persona layer use centrality measures and gravity wells to represent structured knowledge with NetworkX.",
"source_file": "persona_layer.md",
"metadata": {"topic": "memory", "component": "persona"},
},
]
added_entries = []
for entry_data in test_entries:
entry_id = episodic_memory.add_entry(
entry_data["content"], entry_data["source_file"], entry_data["metadata"]
)
added_entries.append(episodic_memory.get_entry_by_id(entry_id))
print(f"✅ Added entry: {entry_id}")
print(f"\n📊 Episodic memory stats: {episodic_memory.get_stats()}")
print("\n🔄 Processing entries through dual manifold...")
# Process entries through dual manifold
processing_results = await dual_manifold_engine.process_memory_entries(
added_entries
)
print(f"✅ Processing results: {processing_results}")
print("\n🔍 Testing hybrid search...")
# Test hybrid search
search_results = episodic_memory.hybrid_search(
"cognitive architecture", top_k=3
)
print(f"✅ Found {len(search_results)} results for 'cognitive architecture'")
for result in search_results[:2]: # Show top 2
print(f" - {result.content[:100]}... (score: {result.combined_score:.3f})")
print("\n🧵 Testing braiding engine...")
# Test braiding engine
braided_results = await braiding_engine.braid_search("memory systems", top_k=2)
print(f"✅ Braided search found {len(braided_results)} results")
for result in braided_results:
print(f" - {result.content[:80]}... (confidence: {result.confidence:.3f})")
print("\n📈 System Statistics:")
# Get system statistics
manifold_stats = dual_manifold_engine.get_manifold_stats()
braiding_stats = braiding_engine.get_gate_stats()
print(
f"Individual Manifold: {manifold_stats['individual']['total_points']} points"
)
print(
f"Collective Manifold: {manifold_stats['collective']['total_points']} points"
)
print(f"Braiding Gates: {braiding_stats['total_gates']} gates")
print(
f"Semantic Concepts: {manifold_stats['semantic']['total_concepts']} concepts"
)
print("\n🎉 Phase 1 implementation test completed successfully!")
print("\nKey Deliverables Implemented:")
print("✅ Episodic memory with hybrid FAISS + BM25 indexing")
print("✅ Semantic distillation pipeline")
print("✅ Knowledge graph construction with NetworkX")
print("✅ Dual manifold representation")
print("✅ Braiding engine with structural gates")
print("✅ FastAPI endpoints for all components")
return True
except Exception as e:
print(f"❌ Test failed with error: {e}")
import traceback
traceback.print_exc()
return False
if __name__ == "__main__":
success = asyncio.run(test_phase_1_implementation())
exit(0 if success else 1)