Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
115 changes: 115 additions & 0 deletions crates/terraphim_agent/src/learnings/capture.rs
Original file line number Diff line number Diff line change
Expand Up @@ -932,6 +932,121 @@ pub fn query_all_entries(
Ok(filtered)
}

/// Score entry relevance based on keyword matching.
/// Returns a score based on the number of matching keywords between
/// the context and the learning content.
/// Score an entry's relevance to a set of context keywords.
///
/// Builds a lowercase searchable text from the entry's fields and returns
/// the number of supplied keywords that occur in it (higher = more relevant).
fn score_entry_relevance(entry: &LearningEntry, context_keywords: &[String]) -> usize {
    // Assemble the haystack from the fields that carry searchable content.
    let haystack = match entry {
        LearningEntry::Learning(l) => format!("{} {} {:?}", l.command, l.error_output, l.tags),
        LearningEntry::Correction(c) => {
            format!("{} {} {}", c.original, c.corrected, c.context_description)
        }
    };
    let haystack = haystack.to_lowercase();

    // Count keyword hits; keywords are expected to be lowercased by the caller.
    let mut matches = 0;
    for keyword in context_keywords {
        if haystack.contains(keyword.as_str()) {
            matches += 1;
        }
    }
    matches
}

/// A scored learning entry with its relevance score.
/// A learning entry paired with the relevance score it received during
/// context matching.
#[derive(Debug, Clone)]
pub struct ScoredEntry {
    /// The underlying learning or correction entry.
    pub entry: LearningEntry,
    /// Relevance score: number of matched context keywords (higher is better).
    pub score: usize,
}

impl ScoredEntry {
    /// Render this entry as a one-line suggestion string ending with its id.
    ///
    /// Learnings show the command and exit code; corrections show the
    /// original -> corrected pair prefixed with the correction type.
    #[allow(dead_code)]
    pub fn format_suggestion(&self) -> String {
        // Extract the label, summary body, and id per variant, then format once.
        let (label, body, id) = match &self.entry {
            LearningEntry::Learning(l) => (
                "cmd".to_string(),
                format!("{} (exit: {})", l.command, l.exit_code),
                &l.id,
            ),
            LearningEntry::Correction(c) => (
                c.correction_type.to_string(),
                format!("{} -> {}", c.original, c.corrected),
                &c.id,
            ),
        };
        format!("[{label}] {body} - {id}")
    }
}

/// Suggest learnings based on context relevance.
///
/// Takes a context string (e.g., current working directory or task description),
/// extracts keywords from it, and scores all learnings by keyword frequency.
/// Returns the top-N most relevant learnings.
///
/// # Arguments
///
/// * `storage_dir` - Directory containing learning markdown files
/// * `context` - Context string to match against (e.g., "rust project with cargo build")
/// * `limit` - Maximum number of suggestions to return
///
/// # Returns
///
/// List of scored entries sorted by relevance (highest first).
/// Suggest learnings based on context relevance.
///
/// Takes a context string (e.g., current working directory or task description),
/// extracts keywords from it, and scores all learnings by keyword frequency.
/// Returns the top-N most relevant learnings.
///
/// # Arguments
///
/// * `storage_dir` - Directory containing learning markdown files
/// * `context` - Context string to match against (e.g., "rust project with cargo build")
/// * `limit` - Maximum number of suggestions to return
///
/// # Errors
///
/// Returns `LearningError` when the stored entries cannot be listed.
///
/// # Returns
///
/// List of scored entries sorted by relevance (highest first). When no usable
/// keywords can be extracted from `context`, falls back to the first `limit`
/// entries as listed, each with a score of 0.
pub fn suggest_learnings(
    storage_dir: &PathBuf,
    context: &str,
    limit: usize,
) -> Result<Vec<ScoredEntry>, LearningError> {
    let all_entries = list_all_entries(storage_dir, usize::MAX)?;

    if all_entries.is_empty() {
        return Ok(Vec::new());
    }

    // Tokenize the context on whitespace, lowercase each word, and strip
    // non-alphanumeric edges (punctuation, quotes). Words of <= 2 characters
    // are dropped as too generic — this also removes empty tokens, so a
    // separate `is_empty` check is unnecessary.
    let context_keywords: Vec<String> = context
        .split_whitespace()
        .map(|w| {
            w.to_lowercase()
                .trim_matches(|c: char| !c.is_alphanumeric())
                .to_string()
        })
        .filter(|w| w.len() > 2)
        .collect();

    if context_keywords.is_empty() {
        // Fallback: nothing to match against, so return entries in listing
        // order with a zero score.
        let recent: Vec<ScoredEntry> = all_entries
            .into_iter()
            .take(limit)
            .map(|entry| ScoredEntry { entry, score: 0 })
            .collect();
        return Ok(recent);
    }

    // Score every entry; keep only those with at least one keyword match.
    let mut scored: Vec<ScoredEntry> = all_entries
        .into_iter()
        .map(|entry| {
            let score = score_entry_relevance(&entry, &context_keywords);
            ScoredEntry { entry, score }
        })
        .filter(|se| se.score > 0)
        .collect();

    // Stable sort by score descending: entries with equal scores keep their
    // original listing order.
    scored.sort_by(|a, b| b.score.cmp(&a.score));

    // `truncate` is already a no-op when `scored.len() <= limit`; no guard needed.
    scored.truncate(limit);

    Ok(scored)
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down
6 changes: 4 additions & 2 deletions crates/terraphim_agent/src/learnings/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,11 @@ mod install;
mod procedure;
mod redaction;

// NOTE(review): blanket allow(unused_imports) — presumably some re-exports are
// consumed only by tests or not-yet-wired CLI paths; TODO confirm and narrow.
#[allow(unused_imports)]
pub use capture::{
CorrectionType, LearningSource, capture_correction, capture_failed_command, correct_learning,
list_all_entries, query_all_entries,
CorrectionType, LearningEntry, LearningSource, ScoredEntry, capture_correction,
capture_failed_command, correct_learning, list_all_entries, query_all_entries,
suggest_learnings,
};

// Re-export for testing - not used by CLI yet
Expand Down
64 changes: 63 additions & 1 deletion crates/terraphim_agent/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -777,6 +777,17 @@ enum LearnSub {
#[arg(long)]
session_id: Option<String>,
},
/// Suggest relevant past learnings based on context
Suggest {
/// Context string (e.g., current working directory or task description)
context: String,
/// Maximum number of suggestions to show
#[arg(long, default_value_t = 5)]
limit: usize,
/// Show global learnings instead of project
#[arg(long, default_value_t = false)]
global: bool,
},
/// Process hook input from AI agents (reads JSON from stdin)
Hook {
/// AI agent format
Expand Down Expand Up @@ -1955,7 +1966,7 @@ async fn run_offline_command(
async fn run_learn_command(sub: LearnSub) -> Result<()> {
use learnings::{
CorrectionType, LearningCaptureConfig, capture_correction, capture_failed_command,
correct_learning, list_all_entries, query_all_entries,
correct_learning, list_all_entries, query_all_entries, suggest_learnings,
};
let config = LearningCaptureConfig::default();

Expand Down Expand Up @@ -2087,6 +2098,57 @@ async fn run_learn_command(sub: LearnSub) -> Result<()> {
}
}
}
// Handle `learn suggest`: score stored learnings against a free-form
// context string and print the top matches.
LearnSub::Suggest {
    context,
    limit,
    global,
} => {
    // Bind the project storage location to a local so the reference taken
    // in the `else` branch below outlives the `if` expression.
    let storage_loc = config.storage_location();
    // `--global` switches lookup to the shared global learnings directory.
    let storage_dir = if global {
        &config.global_dir
    } else {
        &storage_loc
    };
    match suggest_learnings(storage_dir, &context, limit) {
        Ok(scored) => {
            if scored.is_empty() {
                println!("No relevant learnings found for context.");
            } else {
                println!("Suggested learnings for context:",);
                for (i, scored_entry) in scored.iter().enumerate() {
                    // [P] = project-scoped entry, [G] = global entry.
                    let source_indicator = match scored_entry.entry.source() {
                        learnings::LearningSource::Project => "[P]",
                        learnings::LearningSource::Global => "[G]",
                    };
                    // One-line summary: command + exit code for learnings,
                    // original -> corrected pair for corrections.
                    let suggestion = match &scored_entry.entry {
                        learnings::LearningEntry::Learning(l) => {
                            format!("[cmd] {} (exit: {})", l.command, l.exit_code)
                        }
                        learnings::LearningEntry::Correction(c) => {
                            format!(
                                "[{}] {} -> {}",
                                c.correction_type, c.original, c.corrected
                            )
                        }
                    };
                    // Numbered list entry with source tag and relevance score.
                    println!(
                        "  {}. {} {} (score: {})",
                        i + 1,
                        source_indicator,
                        suggestion,
                        scored_entry.score
                    );
                    println!("     ID: {}", scored_entry.entry.id());
                    // Corrections may carry extra guidance text; show it when present.
                    if let Some(correction) = scored_entry.entry.correction_text() {
                        println!("     Correction: {}", correction);
                    }
                }
            }
            Ok(())
        }
        Err(e) => Err(e.into()),
    }
}
// Handle hook input piped from AI agents (JSON read from stdin).
LearnSub::Hook { format } => learnings::process_hook_input(format)
    .await
    .map_err(|e| e.into()),
Expand Down
Loading