From dec57585f8fce41eb797a2b952d46c61d9100823 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 20:46:47 +0800 Subject: [PATCH 01/17] refactor(mcp): complete trait-ization of remaining MCP tool layer - Extend RegistryClient with save_relation, query_relations, delete_relations, list_vault_notes - Add WorkflowClient trait (list_workflows, get_workflow, run_workflow, get_execution) - Add VaultClient trait (list_vault_notes, read_vault_note, get_backlinks, build_vault_graph) - Implement all traits on AppContext (registry.rs, workflow/mod.rs, vault/mod.rs) - Enable AppContext Clone via Arc> for spawn_blocking safety - Refactor relations.rs, workflow.rs, vault.rs to use trait calls exclusively - Eliminate all production-code inline crate:: calls in mcp/tools/ relations|workflow|vault --- src/clients.rs | 35 +++++ src/mcp/tools/relations.rs | 90 ++----------- src/mcp/tools/vault.rs | 261 ++++++++++--------------------------- src/mcp/tools/workflow.rs | 115 +--------------- src/registry.rs | 125 ++++++++++++++++++ src/storage.rs | 7 +- src/vault/mod.rs | 145 +++++++++++++++++++++ src/workflow/mod.rs | 131 +++++++++++++++++++ 8 files changed, 524 insertions(+), 385 deletions(-) diff --git a/src/clients.rs b/src/clients.rs index 738ead7..2b94c4a 100644 --- a/src/clients.rs +++ b/src/clients.rs @@ -74,6 +74,25 @@ pub trait RegistryClient: Send + Sync { ) -> Result; fn query_dead_code(&self, repo_id: &str, include_pub: bool, limit: usize) -> Result; + + fn save_relation( + &self, + from: &str, + to: &str, + relation_type: &str, + confidence: f64, + ) -> Result; + + fn query_relations( + &self, + entity_id: &str, + direction: &str, + relation_type: Option<&str>, + ) -> Result; + + fn delete_relations(&self, from: &str, to: &str, relation_type: Option<&str>) -> Result; + + fn list_vault_notes(&self) -> Result; } /// Knowledge engine operations. @@ -110,3 +129,19 @@ pub trait SearchClient: Send + Sync { limit: usize, ) -> Result>; } + +/// Workflow management exposed to MCP tools. +pub trait WorkflowClient: Send + Sync { + fn list_workflows(&self) -> Result; + fn get_workflow(&self, workflow_id: &str) -> Result; + fn run_workflow(&self, workflow_id: &str, inputs: Value) -> Result; + fn get_execution(&self, exec_id: i64) -> Result; +} + +/// Vault (Markdown knowledge-base) operations exposed to MCP tools. +pub trait VaultClient: Send + Sync { + fn list_vault_notes(&self) -> Result; + fn read_vault_note(&self, path: &str) -> Result; + fn get_backlinks(&self, note_id: &str) -> Result; + fn build_vault_graph(&self, repo_id: Option<&str>) -> Result; +} diff --git a/src/mcp/tools/relations.rs b/src/mcp/tools/relations.rs index 70ee592..cf88238 100644 --- a/src/mcp/tools/relations.rs +++ b/src/mcp/tools/relations.rs @@ -1,5 +1,6 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +use crate::clients::RegistryClient; use crate::mcp::McpTool; #[derive(Clone)] @@ -83,10 +84,7 @@ Returns: success boolean and relation details."#, })); } - let conn = ctx.conn()?; - if let Err(e) = - crate::registry::relation::save_relation(&conn, &from, &to, &rel_type, confidence) - { + if let Err(e) = ctx.save_relation(&from, &to, &rel_type, confidence) { let msg = e.to_string(); if msg.contains("foreign key constraint") || msg.contains("FOREIGN KEY") { return Ok(serde_json::json!({ @@ -162,73 +160,9 @@ Returns: JSON array of relations with to_entity_id, relation_type, confidence, a })); } - let conn = ctx.conn()?; - let results = match direction { - "bidirectional" => { - let rows = crate::registry::relation::find_related_entities( - &conn, - &entity_id, - relation_type, - )?; - rows.into_iter() - .map(|(from, to, rt, conf, created)| { - serde_json::json!({ - "from_entity_id": from, - "to_entity_id": to, - "relation_type": rt, - "confidence": conf, - "created_at": created - }) - }) - .collect::>() - } - "incoming" => { - let mut stmt = conn.prepare( - "SELECT from_entity_id, relation_type, confidence, created_at FROM relations - WHERE to_entity_id = ?1 - ORDER BY confidence DESC", - )?; - let rows = stmt.query_map([&entity_id], |row| { - Ok(( - row.get::<_, String>(0)?, - row.get::<_, String>(1)?, - row.get::<_, f64>(2)?, - row.get::<_, String>(3)?, - )) - })?; - let filtered: Vec<_> = if let Some(rt) = relation_type.filter(|s| !s.is_empty()) { - rows.filter(|r| r.as_ref().map(|(_, t, _, _)| t == rt).unwrap_or(false)) - .collect::, _>>()? - } else { - rows.collect::, _>>()? - }; - filtered - .into_iter() - .map(|(from, rt, conf, created)| { - serde_json::json!({ - "from_entity_id": from, - "relation_type": rt, - "confidence": conf, - "created_at": created - }) - }) - .collect::>() - } - _ => { - let rows = - crate::registry::relation::list_relations(&conn, &entity_id, relation_type)?; - rows.into_iter() - .map(|(to, rt, conf, created)| { - serde_json::json!({ - "to_entity_id": to, - "relation_type": rt, - "confidence": conf, - "created_at": created - }) - }) - .collect::>() - } - }; + let value = ctx.query_relations(&entity_id, direction, relation_type)?; + let results = + value.get("relations").and_then(|v| v.as_array()).cloned().unwrap_or_default(); Ok(serde_json::json!({ "success": true, @@ -297,17 +231,9 @@ Returns: success boolean and count of deleted relations."#, })); } - let conn = ctx.conn()?; - let count = match rel_type.as_deref().filter(|s| !s.is_empty()) { - Some(rt) => conn.execute( - "DELETE FROM relations WHERE from_entity_id = ?1 AND to_entity_id = ?2 AND relation_type = ?3", - rusqlite::params![&from, &to, rt], - )?, - None => conn.execute( - "DELETE FROM relations WHERE from_entity_id = ?1 AND to_entity_id = ?2", - rusqlite::params![&from, &to], - )?, - }; + let value = + ctx.delete_relations(&from, &to, rel_type.as_deref().filter(|s| !s.is_empty()))?; + let count = value.get("deleted").and_then(|v| v.as_u64()).unwrap_or(0) as usize; Ok(serde_json::json!({ "success": true, diff --git a/src/mcp/tools/vault.rs b/src/mcp/tools/vault.rs index 8b90f9a..f2461de 100644 --- a/src/mcp/tools/vault.rs +++ b/src/mcp/tools/vault.rs @@ -1,6 +1,8 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +use crate::clients::{DigestClient, VaultClient}; use crate::mcp::McpTool; +use crate::registry::VaultNote; use anyhow::Context; #[derive(Clone)] @@ -51,42 +53,45 @@ Returns: JSON array of matching notes. Each includes: id, title, path, and tags. .and_then(|v| v.as_str()) .context("Missing required argument: query")?; - let pool = ctx.pool(); - let results = tokio::task::spawn_blocking({ - let query = query.to_string(); - move || { - let conn = pool.get()?; - let notes = crate::registry::vault::list_vault_notes(&conn)?; - let keywords: Vec<&str> = query.split_whitespace().collect(); - - let filtered: Vec<_> = notes - .into_iter() - .filter(|n| { - let content = crate::vault::fs_io::read_note_body(&n.path) - .map(|(body, _fm)| body) - .unwrap_or_default(); - let hay = format!( - "{} {} {} {}", - n.id, - n.title.as_deref().unwrap_or(""), - n.tags.join(","), - content - ) - .to_lowercase(); - keywords.iter().all(|kw| hay.contains(&kw.to_lowercase())) + let ctx = ctx.clone(); + let query_owned = query.to_string(); + let results = tokio::task::spawn_blocking(move || { + let value = ctx.list_vault_notes()?; + let notes: Vec = serde_json::from_value( + value.get("notes").cloned().unwrap_or(serde_json::json!([])), + ) + .unwrap_or_default(); + let keywords: Vec<&str> = query_owned.split_whitespace().collect(); + + let filtered: Vec<_> = notes + .into_iter() + .filter(|n| { + let content = ctx + .read_vault_note(&n.path) + .ok() + .and_then(|v| v.get("content").and_then(|c| c.as_str()).map(String::from)) + .unwrap_or_default(); + let hay = format!( + "{} {} {} {}", + n.id, + n.title.as_deref().unwrap_or(""), + n.tags.join(","), + content + ) + .to_lowercase(); + keywords.iter().all(|kw| hay.contains(&kw.to_lowercase())) + }) + .map(|n| { + serde_json::json!({ + "id": n.id, + "title": n.title, + "path": n.path, + "tags": n.tags, }) - .map(|n| { - serde_json::json!({ - "id": n.id, - "title": n.title, - "path": n.path, - "tags": n.tags, - }) - }) - .collect(); + }) + .collect(); - anyhow::Ok(filtered) - } + anyhow::Ok(filtered) }) .await .map_err(|e| anyhow::anyhow!("spawn_blocking failed: {}", e))??; @@ -140,15 +145,18 @@ Returns: JSON with frontmatter (id, repo, tags, ai_context, created, updated) an async fn invoke( &self, args: serde_json::Value, - _ctx: &mut crate::storage::AppContext, + ctx: &mut crate::storage::AppContext, ) -> anyhow::Result { let path = args .get("path") .and_then(|v| v.as_str()) .context("Missing required argument: path")?; - let (body, frontmatter) = crate::vault::fs_io::read_note_body(path) + let value = ctx + .read_vault_note(path) .context("Failed to read note — file not found or unreadable")?; + let body = value.get("content").cloned().unwrap_or(serde_json::json!("")); + let frontmatter = value.get("frontmatter").cloned().unwrap_or(serde_json::json!(null)); Ok(serde_json::json!({ "success": true, @@ -325,30 +333,12 @@ Returns: JSON array of backlinking notes, each with id, title, and path."#, .and_then(|v| v.as_str()) .context("Missing required argument: note_id")?; - let vault_dir = ctx.storage.workspace_dir().ok().map(|ws| ws.join("vault")); - let backlinks = tokio::task::spawn_blocking({ - let note_id = note_id.to_string(); - let vault_dir = vault_dir.clone(); - move || { - if let Some(vd) = vault_dir { - match crate::vault::backlinks::build_backlink_index(&vd) { - Ok(index) => crate::vault::backlinks::get_backlinks(&index, ¬e_id), - Err(_) => Vec::new(), - } - } else { - Vec::new() - } - } - }) - .await - .map_err(|e| anyhow::anyhow!("spawn_blocking failed: {}", e))?; - - Ok(serde_json::json!({ - "success": true, - "target": note_id, - "count": backlinks.len(), - "backlinks": backlinks, - })) + let ctx = ctx.clone(); + let note_id = note_id.to_string(); + let value = tokio::task::spawn_blocking(move || ctx.get_backlinks(¬e_id)) + .await + .map_err(|e| anyhow::anyhow!("spawn_blocking failed: {}", e))??; + Ok(value) } } @@ -386,39 +376,32 @@ Returns: JSON with success status and the generated file path."#, let today = chrono::Utc::now().format("%Y-%m-%d").to_string(); let rel_path = format!("99-Meta/Daily/{}.md", today); - let pool = ctx.pool(); - let config = ctx.config.clone(); - let i18n = ctx.i18n; - + let ctx = ctx.clone(); + let today_owned = today.clone(); let vault_root = ctx .storage .workspace_dir() .map(|ws| ws.join("vault")) .unwrap_or_else(|_| std::path::PathBuf::from("vault")); - let file_path = tokio::task::spawn_blocking({ - let rel_path = rel_path.clone(); - let today = today.clone(); - let vault_root = vault_root.clone(); - move || { - let conn = pool.get()?; - let digest = crate::digest::generate_daily_digest(&conn, &config, &i18n)?; - - let target = resolve_vault_path(&rel_path, &vault_root)?; - - if let Some(parent) = target.parent() { - std::fs::create_dir_all(parent)?; - } + let file_path = tokio::task::spawn_blocking(move || { + let digest = ctx.generate_daily_digest()?; + let digest_str = digest.get("digest").and_then(|v| v.as_str()).unwrap_or(""); - let content = if target.exists() { - let existing = std::fs::read_to_string(&target)?; - format!("{}\n\n{}", existing, digest) - } else { - format!("---\ndate: {}\ntags: [\"daily\"]\n---\n\n{}", today, digest) - }; + let target = resolve_vault_path(&rel_path, &vault_root)?; - std::fs::write(&target, content)?; - anyhow::Ok(target.to_string_lossy().to_string()) + if let Some(parent) = target.parent() { + std::fs::create_dir_all(parent)?; } + + let content = if target.exists() { + let existing = std::fs::read_to_string(&target)?; + format!("{}\n\n{}", existing, digest_str) + } else { + format!("---\ndate: {}\ntags: [\"daily\"]\n---\n\n{}", today_owned, digest_str) + }; + + std::fs::write(&target, content)?; + anyhow::Ok(target.to_string_lossy().to_string()) }) .await .map_err(|e| anyhow::anyhow!("spawn_blocking failed: {}", e))??; @@ -468,110 +451,10 @@ Returns: JSON with nodes (id, title) and edges (source, target)."#, ) -> anyhow::Result { let repo_id = args.get("repo_id").and_then(|v| v.as_str()).map(|s| s.to_string()); - let vault_dir = ctx.storage.workspace_dir().ok().map(|ws| ws.join("vault")); - let graph = tokio::task::spawn_blocking({ - let repo_id = repo_id.clone(); - let vault_dir = vault_dir.clone(); - move || { - let Some(vd) = vault_dir else { - return anyhow::Ok(serde_json::json!({ - "success": true, - "count": 0, - "edge_count": 0, - "nodes": [], - "edges": [], - })); - }; - - let index = crate::vault::backlinks::build_backlink_index(&vd)?; - - let mut id_to_title: std::collections::HashMap = - std::collections::HashMap::new(); - let mut id_to_repo: std::collections::HashMap = - std::collections::HashMap::new(); - - for entry in walkdir::WalkDir::new(&vd) - .follow_links(false) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.file_type().is_file()) - .filter(|e| e.path().extension().map(|ext| ext == "md").unwrap_or(false)) - { - let path = entry.path(); - let rel_path = path.strip_prefix(&vd).unwrap_or(path); - let id = rel_path.to_string_lossy().replace('\\', "/"); - - let content = match std::fs::read_to_string(path) { - Ok(c) => c, - Err(_) => continue, - }; - - if let Some((fm, _)) = crate::vault::frontmatter::extract_frontmatter(&content) - { - id_to_title.insert(id.clone(), fm.title.unwrap_or_else(|| id.clone())); - if let Some(repo) = fm.repo { - id_to_repo.insert(id, repo); - } - } else { - id_to_title.insert(id.clone(), id.clone()); - } - } - - let allowed_ids: std::collections::HashSet = if let Some(ref rid) = repo_id - { - id_to_repo.iter().filter(|(_, r)| *r == rid).map(|(id, _)| id.clone()).collect() - } else { - id_to_title.keys().cloned().collect() - }; - - // Normalize wikilink targets (e.g. "b" -> "b.md") to vault file ids. - let mut id_lookup: std::collections::HashMap = - std::collections::HashMap::new(); - for id in id_to_title.keys() { - id_lookup.insert(id.clone(), id.clone()); - if let Some(stem) = id.strip_suffix(".md") { - id_lookup.insert(stem.to_string(), id.clone()); - } - } - - let nodes: Vec<_> = allowed_ids - .iter() - .map(|id| { - serde_json::json!({ - "id": id, - "title": id_to_title.get(id).unwrap_or(id), - }) - }) - .collect(); - - let mut edges = Vec::new(); - for (target, sources) in &index { - let normalized = - id_lookup.get(target).cloned().unwrap_or_else(|| target.clone()); - if !allowed_ids.contains(&normalized) { - continue; - } - for source in sources { - if allowed_ids.contains(source) { - edges.push(serde_json::json!({ - "source": source, - "target": &normalized, - })); - } - } - } - - anyhow::Ok(serde_json::json!({ - "success": true, - "count": nodes.len(), - "edge_count": edges.len(), - "nodes": nodes, - "edges": edges, - })) - } - }) - .await - .map_err(|e| anyhow::anyhow!("spawn_blocking failed: {}", e))??; + let ctx = ctx.clone(); + let graph = tokio::task::spawn_blocking(move || ctx.build_vault_graph(repo_id.as_deref())) + .await + .map_err(|e| anyhow::anyhow!("spawn_blocking failed: {}", e))??; Ok(graph) } diff --git a/src/mcp/tools/workflow.rs b/src/mcp/tools/workflow.rs index bb17bc7..0dfd7bb 100644 --- a/src/mcp/tools/workflow.rs +++ b/src/mcp/tools/workflow.rs @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +use crate::clients::WorkflowClient; use crate::mcp::McpTool; -use std::collections::HashMap; #[derive(Clone)] pub struct DevkitWorkflowListTool; @@ -35,23 +35,7 @@ Returns: JSON array of workflows with id, name, and version."#, _args: serde_json::Value, ctx: &mut crate::storage::AppContext, ) -> anyhow::Result { - let conn = ctx.conn()?; - let workflows = crate::workflow::state::list_workflows(&conn)?; - let items: Vec = workflows - .into_iter() - .map(|(id, name, version)| { - serde_json::json!({ - "id": id, - "name": name, - "version": version - }) - }) - .collect(); - Ok(serde_json::json!({ - "success": true, - "count": items.len(), - "workflows": items - })) + ctx.list_workflows() } } @@ -104,81 +88,7 @@ Returns: execution summary with status, step results, and execution_id."#, })); } - let conn = ctx.conn()?; - let wf = match crate::workflow::state::get_workflow(&conn, &workflow_id)? { - Some(wf) => wf, - None => { - return Ok(serde_json::json!({ - "success": false, - "error": format!("workflow '{}' not found", workflow_id) - })); - } - }; - - // Parse inputs into HashMap - let inputs: HashMap = if let Some(obj) = inputs_value.as_object() { - obj.iter() - .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string()))) - .collect() - } else { - HashMap::new() - }; - - let inputs_json = inputs_value.to_string(); - let exec_id = crate::workflow::state::create_execution(&conn, &workflow_id, &inputs_json)?; - crate::workflow::state::update_execution( - &conn, - exec_id, - &crate::workflow::model::ExecutionStatus::Running, - None, - None, - )?; - - let pool = ctx.pool(); - let start = std::time::Instant::now(); - let result = crate::workflow::executor::execute_workflow(&conn, &pool, &wf, inputs); - let duration_ms = start.elapsed().as_millis() as i64; - - match result { - Ok(step_results) => { - crate::workflow::state::update_execution( - &conn, - exec_id, - &crate::workflow::model::ExecutionStatus::Completed, - None, - Some(duration_ms), - )?; - let results_json: HashMap = step_results - .into_iter() - .map(|(k, v)| (k, serde_json::to_value(v).unwrap_or(serde_json::json!(null)))) - .collect(); - Ok(serde_json::json!({ - "success": true, - "execution_id": exec_id, - "workflow_id": workflow_id, - "status": "Completed", - "duration_ms": duration_ms, - "step_results": results_json - })) - } - Err(e) => { - crate::workflow::state::update_execution( - &conn, - exec_id, - &crate::workflow::model::ExecutionStatus::Failed, - None, - Some(duration_ms), - )?; - Ok(serde_json::json!({ - "success": false, - "execution_id": exec_id, - "workflow_id": workflow_id, - "status": "Failed", - "duration_ms": duration_ms, - "error": e.to_string() - })) - } - } + ctx.run_workflow(&workflow_id, inputs_value) } } @@ -227,24 +137,7 @@ Returns: execution record with status, current_step, timestamps, and duration."# })); } - let conn = ctx.conn()?; - match crate::workflow::state::get_execution(&conn, exec_id)? { - Some(exec) => Ok(serde_json::json!({ - "success": true, - "execution_id": exec.id, - "workflow_id": exec.workflow_id, - "status": format!("{:?}", exec.status), - "current_step": exec.current_step, - "started_at": exec.started_at, - "finished_at": exec.finished_at, - "duration_ms": exec.duration_ms, - "inputs": exec.inputs_json - })), - None => Ok(serde_json::json!({ - "success": false, - "error": format!("execution {} not found", exec_id) - })), - } + ctx.get_execution(exec_id) } } diff --git a/src/registry.rs b/src/registry.rs index 0d89121..1ffaccc 100644 --- a/src/registry.rs +++ b/src/registry.rs @@ -382,6 +382,131 @@ impl crate::clients::RegistryClient for crate::storage::AppContext { "dead_functions": out })) } + + fn save_relation( + &self, + from: &str, + to: &str, + relation_type: &str, + confidence: f64, + ) -> anyhow::Result { + let conn = self.conn()?; + crate::registry::relation::save_relation(&conn, from, to, relation_type, confidence)?; + Ok(serde_json::json!({ "success": true })) + } + + fn query_relations( + &self, + entity_id: &str, + direction: &str, + relation_type: Option<&str>, + ) -> anyhow::Result { + let conn = self.conn()?; + let results = match direction { + "bidirectional" => { + let rows = crate::registry::relation::find_related_entities( + &conn, + entity_id, + relation_type, + )?; + rows.into_iter() + .map(|(from, to, rt, conf, created)| { + serde_json::json!({ + "from_entity_id": from, + "to_entity_id": to, + "relation_type": rt, + "confidence": conf, + "created_at": created + }) + }) + .collect::>() + } + "incoming" => { + let mut stmt = conn.prepare( + "SELECT from_entity_id, relation_type, confidence, created_at FROM relations + WHERE to_entity_id = ?1 + ORDER BY confidence DESC", + )?; + let rows = stmt.query_map([entity_id], |row| { + Ok(( + row.get::<_, String>(0)?, + row.get::<_, String>(1)?, + row.get::<_, f64>(2)?, + row.get::<_, String>(3)?, + )) + })?; + let filtered: Vec<_> = if let Some(rt) = relation_type.filter(|s| !s.is_empty()) { + rows.filter(|r| r.as_ref().map(|(_, t, _, _)| t == rt).unwrap_or(false)) + .collect::, _>>()? + } else { + rows.collect::, _>>()? + }; + filtered + .into_iter() + .map(|(from, rt, conf, created)| { + serde_json::json!({ + "from_entity_id": from, + "relation_type": rt, + "confidence": conf, + "created_at": created + }) + }) + .collect::>() + } + _ => { + let rows = + crate::registry::relation::list_relations(&conn, entity_id, relation_type)?; + rows.into_iter() + .map(|(to, rt, conf, created)| { + serde_json::json!({ + "to_entity_id": to, + "relation_type": rt, + "confidence": conf, + "created_at": created + }) + }) + .collect::>() + } + }; + Ok(serde_json::json!({ "success": true, "relations": results })) + } + + fn delete_relations( + &self, + from: &str, + to: &str, + relation_type: Option<&str>, + ) -> anyhow::Result { + let conn = self.conn()?; + let count = match relation_type.filter(|s| !s.is_empty()) { + Some(rt) => conn.execute( + "DELETE FROM relations WHERE from_entity_id = ?1 AND to_entity_id = ?2 AND relation_type = ?3", + rusqlite::params![from, to, rt], + )?, + None => conn.execute( + "DELETE FROM relations WHERE from_entity_id = ?1 AND to_entity_id = ?2", + rusqlite::params![from, to], + )?, + }; + Ok(serde_json::json!({ "success": true, "deleted": count })) + } + + fn list_vault_notes(&self) -> anyhow::Result { + let conn = self.conn()?; + let notes = crate::registry::vault::list_vault_notes(&conn)?; + let results: Vec = notes + .into_iter() + .map(|n| { + serde_json::json!({ + "id": n.id, + "path": n.path, + "title": n.title, + "tags": n.tags, + }) + }) + .collect(); + Ok(serde_json::json!({ "success": true, "count": results.len(), "notes": results })) + } } #[cfg(test)] diff --git a/src/storage.rs b/src/storage.rs index c5004f1..4f14713 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -116,12 +116,13 @@ impl StorageBackend for DefaultStorageBackend { /// /// 命令处理函数应通过此结构体获取所有外部依赖, /// 避免直接调用全局函数或读取环境变量。 +#[derive(Clone)] pub struct AppContext { pub storage: Arc, pub config: Config, pub i18n: I18n, pool: Pool, - env_cache: std::sync::Mutex, + env_cache: Arc>, } impl AppContext { @@ -146,7 +147,7 @@ impl AppContext { config, i18n, pool, - env_cache: std::sync::Mutex::new(EnvVersionCache::default()), + env_cache: Arc::new(std::sync::Mutex::new(EnvVersionCache::default())), }) } @@ -169,7 +170,7 @@ impl AppContext { config, i18n, pool, - env_cache: std::sync::Mutex::new(EnvVersionCache::default()), + env_cache: Arc::new(std::sync::Mutex::new(EnvVersionCache::default())), }) } diff --git a/src/vault/mod.rs b/src/vault/mod.rs index 5168b14..91e130a 100644 --- a/src/vault/mod.rs +++ b/src/vault/mod.rs @@ -6,3 +6,148 @@ pub mod fs_io; pub mod indexer; pub mod scanner; pub mod wikilink; + +use crate::storage::AppContext; + +impl crate::clients::VaultClient for AppContext { + fn list_vault_notes(&self) -> anyhow::Result { + let conn = self.conn()?; + let notes = crate::registry::vault::list_vault_notes(&conn)?; + let results: Vec = notes + .into_iter() + .map(|n| { + serde_json::json!({ + "id": n.id, + "path": n.path, + "title": n.title, + "tags": n.tags, + }) + }) + .collect(); + Ok(serde_json::json!({"success": true, "count": results.len(), "notes": results})) + } + + fn read_vault_note(&self, path: &str) -> anyhow::Result { + let (body, frontmatter) = fs_io::read_note_body(path) + .ok_or_else(|| anyhow::anyhow!("note not found or unreadable"))?; + Ok(serde_json::json!({ + "success": true, + "path": path, + "content": body, + "frontmatter": frontmatter, + })) + } + + fn get_backlinks(&self, note_id: &str) -> anyhow::Result { + let vault_dir = self.storage.workspace_dir().ok().map(|ws| ws.join("vault")); + let backlinks = if let Some(vd) = vault_dir { + match backlinks::build_backlink_index(&vd) { + Ok(index) => backlinks::get_backlinks(&index, note_id), + Err(_) => Vec::new(), + } + } else { + Vec::new() + }; + Ok(serde_json::json!({ + "success": true, + "target": note_id, + "count": backlinks.len(), + "backlinks": backlinks, + })) + } + + fn build_vault_graph(&self, repo_id: Option<&str>) -> anyhow::Result { + let vault_dir = self.storage.workspace_dir().ok().map(|ws| ws.join("vault")); + let Some(vd) = vault_dir else { + return Ok(serde_json::json!({ + "success": true, + "count": 0, + "edge_count": 0, + "nodes": [], + "edges": [], + })); + }; + + let index = backlinks::build_backlink_index(&vd)?; + + let mut id_to_title: std::collections::HashMap = + std::collections::HashMap::new(); + let mut id_to_repo: std::collections::HashMap = + std::collections::HashMap::new(); + + for entry in walkdir::WalkDir::new(&vd) + .follow_links(false) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| e.file_type().is_file()) + .filter(|e| e.path().extension().map(|ext| ext == "md").unwrap_or(false)) + { + let path = entry.path(); + let rel_path = path.strip_prefix(&vd).unwrap_or(path); + let id = rel_path.to_string_lossy().replace('\\', "/"); + + let content = match std::fs::read_to_string(path) { + Ok(c) => c, + Err(_) => continue, + }; + + if let Some((fm, _)) = frontmatter::extract_frontmatter(&content) { + id_to_title.insert(id.clone(), fm.title.unwrap_or_else(|| id.clone())); + if let Some(repo) = fm.repo { + id_to_repo.insert(id, repo); + } + } else { + id_to_title.insert(id.clone(), id.clone()); + } + } + + let allowed_ids: std::collections::HashSet = if let Some(rid) = repo_id { + id_to_repo.iter().filter(|(_, r)| *r == rid).map(|(id, _)| id.clone()).collect() + } else { + id_to_title.keys().cloned().collect() + }; + + let mut id_lookup: std::collections::HashMap = + std::collections::HashMap::new(); + for id in id_to_title.keys() { + id_lookup.insert(id.clone(), id.clone()); + if let Some(stem) = id.strip_suffix(".md") { + id_lookup.insert(stem.to_string(), id.clone()); + } + } + + let nodes: Vec<_> = allowed_ids + .iter() + .map(|id| { + serde_json::json!({ + "id": id, + "title": id_to_title.get(id).unwrap_or(id), + }) + }) + .collect(); + + let mut edges = Vec::new(); + for (target, sources) in &index { + let normalized = id_lookup.get(target).cloned().unwrap_or_else(|| target.clone()); + if !allowed_ids.contains(&normalized) { + continue; + } + for source in sources { + if allowed_ids.contains(source) { + edges.push(serde_json::json!({ + "source": source, + "target": &normalized, + })); + } + } + } + + Ok(serde_json::json!({ + "success": true, + "count": nodes.len(), + "edge_count": edges.len(), + "nodes": nodes, + "edges": edges, + })) + } +} diff --git a/src/workflow/mod.rs b/src/workflow/mod.rs index b66cc28..eb534bc 100644 --- a/src/workflow/mod.rs +++ b/src/workflow/mod.rs @@ -17,3 +17,134 @@ pub use state::{ update_execution, }; pub use validator::validate_workflow; + +use crate::storage::AppContext; + +impl crate::clients::WorkflowClient for AppContext { + fn list_workflows(&self) -> anyhow::Result { + let conn = self.conn()?; + let workflows = state::list_workflows(&conn)?; + let items: Vec = workflows + .into_iter() + .map(|(id, name, version)| { + serde_json::json!({"id": id, "name": name, "version": version}) + }) + .collect(); + Ok(serde_json::json!({"success": true, "count": items.len(), "workflows": items})) + } + + fn get_workflow(&self, workflow_id: &str) -> anyhow::Result { + let conn = self.conn()?; + match state::get_workflow(&conn, workflow_id)? { + Some(wf) => Ok(serde_json::json!({ + "success": true, + "id": wf.id, + "name": wf.name, + "version": wf.version, + "description": wf.description, + "steps": wf.steps.len(), + })), + None => Ok(serde_json::json!({"success": false, "error": "workflow not found"})), + } + } + + fn run_workflow( + &self, + workflow_id: &str, + inputs: serde_json::Value, + ) -> anyhow::Result { + let conn = self.conn()?; + let wf = match state::get_workflow(&conn, workflow_id)? { + Some(wf) => wf, + None => { + return Ok(serde_json::json!({ + "success": false, + "error": format!("workflow '{}' not found", workflow_id) + })); + } + }; + + let inputs_map: std::collections::HashMap = + if let Some(obj) = inputs.as_object() { + obj.iter() + .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string()))) + .collect() + } else { + std::collections::HashMap::new() + }; + + let inputs_json = inputs.to_string(); + let exec_id = state::create_execution(&conn, workflow_id, &inputs_json)?; + state::update_execution(&conn, exec_id, &model::ExecutionStatus::Running, None, None)?; + + let pool = self.pool(); + let start = std::time::Instant::now(); + let result = executor::execute_workflow(&conn, &pool, &wf, inputs_map); + let duration_ms = start.elapsed().as_millis() as i64; + + match result { + Ok(step_results) => { + state::update_execution( + &conn, + exec_id, + &model::ExecutionStatus::Completed, + None, + Some(duration_ms), + )?; + let results_json: std::collections::HashMap = + step_results + .into_iter() + .map(|(k, v)| { + (k, serde_json::to_value(v).unwrap_or(serde_json::json!(null))) + }) + .collect(); + Ok(serde_json::json!({ + "success": true, + "execution_id": exec_id, + "workflow_id": workflow_id, + "status": "Completed", + "duration_ms": duration_ms, + "step_results": results_json + })) + } + Err(e) => { + state::update_execution( + &conn, + exec_id, + &model::ExecutionStatus::Failed, + None, + Some(duration_ms), + )?; + Ok(serde_json::json!({ + "success": false, + "execution_id": exec_id, + "workflow_id": workflow_id, + "status": "Failed", + "duration_ms": duration_ms, + "error": e.to_string() + })) + } + } + } + + fn get_execution(&self, exec_id: i64) -> anyhow::Result { + let conn = self.conn()?; + match state::get_execution(&conn, exec_id)? { + Some(exec) => Ok(serde_json::json!({ + "success": true, + "execution_id": exec.id, + "workflow_id": exec.workflow_id, + "status": format!("{:?}", exec.status), + "current_step": exec.current_step, + "started_at": exec.started_at, + "finished_at": exec.finished_at, + "duration_ms": exec.duration_ms, + "inputs": exec.inputs_json, + })), + None => Ok(serde_json::json!({ + "success": false, + "error": format!("execution {} not found", exec_id) + })), + } + } +} From 8a36e72d18bcc3e37fe32b4c27b21ad789abf4b8 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 21:12:48 +0800 Subject: [PATCH 02/17] refactor(knowledge_engine): extract Config load + prepare_repos, eliminate loop-level I/O - index_repo: accept Config by parameter instead of loading internally - daemon.rs: load Config once before indexing loop - run_index_with_progress: hoist Config::load() out of repo loop - Extract prepare_repos() pure helper for path resolution / auto-registration - Result: ~20 fewer inline crate:: calls, eliminated repeated disk I/O in hot loop --- src/daemon.rs | 3 ++- src/knowledge_engine/index.rs | 47 +++++++++++++++++++++-------------- 2 files changed, 30 insertions(+), 20 deletions(-) diff --git a/src/daemon.rs b/src/daemon.rs index 6ad0bc8..746a8db 100644 --- a/src/daemon.rs +++ b/src/daemon.rs @@ -105,6 +105,7 @@ impl Daemon { let pool = self.pool.clone(); match tokio::task::spawn_blocking(move || { let mut conn = pool.get()?; + let config = crate::config::Config::load().ok(); let repos = if let Some(threshold) = index_threshold { crate::registry::repo::list_repos_need_index(&conn, &threshold)? } else { @@ -112,7 +113,7 @@ impl Daemon { }; let mut count = 0; for repo in repos { - if let Err(e) = index_repo(&mut conn, &repo) { + if let Err(e) = index_repo(&mut conn, &repo, config.as_ref()) { tracing::warn!("Failed to index {}: {}", repo.id, e); } else { count += 1; diff --git a/src/knowledge_engine/index.rs b/src/knowledge_engine/index.rs index 2c59804..fb668be 100644 --- a/src/knowledge_engine/index.rs +++ b/src/knowledge_engine/index.rs @@ -20,10 +20,10 @@ fn index_repo_in_search( pub fn index_repo( conn: &mut rusqlite::Connection, repo: &crate::registry::RepoEntry, + config: Option<&crate::config::Config>, ) -> anyhow::Result<()> { use tracing::{info, warn}; - let config = crate::config::Config::load().ok(); let (summary, keywords) = config .as_ref() .and_then(|cfg| super::try_llm_summary(&repo.local_path, &cfg.llm)) @@ -66,6 +66,31 @@ pub fn run_index( run_index_with_progress(conn, path, None, skip_embeddings) } +/// Resolve the list of repositories to index for a given path. +/// If `path` is empty, returns all registered repos. +/// If `path` points to an unregistered repo, auto-registers it before returning. +fn prepare_repos(conn: &mut rusqlite::Connection, path: &str) -> anyhow::Result> { + use tracing::info; + + if path.is_empty() { + return crate::registry::repo::list_repos(conn); + } + + let p = PathBuf::from(path); + if !p.exists() { + anyhow::bail!("Path does not exist: {}", path); + } + let registered = crate::registry::repo::list_repos(conn)?; + if let Some(repo) = registered.into_iter().find(|r| r.local_path == p) { + Ok(vec![repo]) + } else { + info!("Registering {} before indexing", path); + let repo = crate::scan::inspect_repo(&p, None)?; + crate::registry::repo::save_repo(conn, &repo)?; + Ok(vec![repo]) + } +} + /// 带进度上报的索引逻辑。 /// `progress_tx` 接收阶段性进度消息,用于 MCP streaming 等实时反馈场景。 pub fn run_index_with_progress( @@ -82,23 +107,7 @@ pub fn run_index_with_progress( } }; - let repos: Vec = if path.is_empty() { - crate::registry::repo::list_repos(conn)? - } else { - let p = PathBuf::from(path); - if !p.exists() { - anyhow::bail!("Path does not exist: {}", path); - } - let registered = crate::registry::repo::list_repos(conn)?; - if let Some(repo) = registered.into_iter().find(|r| r.local_path == p) { - vec![repo] - } else { - info!("Registering {} before indexing", path); - let repo = crate::scan::inspect_repo(&p, None)?; - crate::registry::repo::save_repo(conn, &repo)?; - vec![repo] - } - }; + let repos = prepare_repos(conn, path)?; // Initialize Tantivy search index writer once for the batch let (search_index, _reader) = crate::search::init_index()?; @@ -112,10 +121,10 @@ pub fn run_index_with_progress( .filter_map(Result::ok) .collect(); + let config = crate::config::Config::load().ok(); let mut count = 0; for repo in &repos { let t0 = std::time::Instant::now(); - let config = crate::config::Config::load().ok(); let (summary, keywords) = config .as_ref() .and_then(|cfg| super::try_llm_summary(&repo.local_path, &cfg.llm)) From a9bea887ba8324084e6a2c3485a96326f9fe01fa Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 21:41:42 +0800 Subject: [PATCH 03/17] perf(knowledge_engine): reuse Tantivy writer across daemon batch indexing - Extract index_repo_core() with explicit writer/schema parameters - index_repo() retains legacy behavior (creates standalone writer) - Add index_repo_with_writer() for batch callers - daemon.rs: init writer once before loop, commit once after loop - Eliminates N-1 redundant Tantivy init/get_writer/commit cycles --- src/daemon.rs | 16 +++++++++-- src/knowledge_engine/index.rs | 50 ++++++++++++++++++++++++----------- 2 files changed, 48 insertions(+), 18 deletions(-) diff --git a/src/daemon.rs b/src/daemon.rs index 746a8db..20891f6 100644 --- a/src/daemon.rs +++ b/src/daemon.rs @@ -5,7 +5,6 @@ use crate::digest::generate_daily_digest; use crate::discovery_engine::{discover_dependencies, discover_similar_projects}; use crate::health::analyze_repo; use crate::i18n::from_language; -use crate::knowledge_engine::index_repo; use crate::registry::{ HealthEntry, health as reg_health, knowledge as reg_knowledge, relation as reg_relation, repo, }; @@ -111,14 +110,27 @@ impl Daemon { } else { crate::registry::repo::list_repos(&conn)? }; + // Batch-index: reuse a single Tantivy writer across all repos + let (search_index, _reader) = crate::search::init_index()?; + let mut writer = crate::search::get_writer(&search_index)?; + let schema = search_index.schema(); let mut count = 0; for repo in repos { - if let Err(e) = index_repo(&mut conn, &repo, config.as_ref()) { + if let Err(e) = crate::knowledge_engine::index_repo_with_writer( + &mut conn, + &repo, + config.as_ref(), + &mut writer, + &schema, + ) { tracing::warn!("Failed to index {}: {}", repo.id, e); } else { count += 1; } } + if let Err(e) = crate::search::commit_writer(&mut writer) { + tracing::warn!("Failed to commit search index: {}", e); + } Ok::<_, anyhow::Error>(count) }) .await diff --git a/src/knowledge_engine/index.rs b/src/knowledge_engine/index.rs index fb668be..21183b9 100644 --- a/src/knowledge_engine/index.rs +++ b/src/knowledge_engine/index.rs @@ -2,25 +2,14 @@ // Copyright (c) 2026 juice094 use crate::registry::RepoEntry; use std::path::PathBuf; +use tantivy::{IndexWriter, schema::Schema}; -fn index_repo_in_search( - repo: &crate::registry::RepoEntry, - summary: &str, - keywords: &str, -) -> anyhow::Result<()> { - let (index, _reader) = crate::search::init_index()?; - let mut writer = crate::search::get_writer(&index)?; - let schema = index.schema(); - crate::search::delete_repo_doc(&mut writer, &schema, &repo.id)?; - crate::search::add_repo_doc(&mut writer, &schema, &repo.id, summary, keywords, &repo.tags)?; - crate::search::commit_writer(&mut writer)?; - Ok(()) -} - -pub fn index_repo( +fn index_repo_core( conn: &mut rusqlite::Connection, repo: &crate::registry::RepoEntry, config: Option<&crate::config::Config>, + writer: &mut IndexWriter, + schema: &Schema, ) -> anyhow::Result<()> { use tracing::{info, warn}; @@ -37,7 +26,9 @@ pub fn index_repo( crate::registry::knowledge::save_summary(conn, &repo.id, &summary, &keywords)?; - if let Err(e) = index_repo_in_search(repo, &summary, &keywords) { + if let Err(e) = crate::search::delete_repo_doc(writer, schema, &repo.id).and_then(|_| { + crate::search::add_repo_doc(writer, schema, &repo.id, &summary, &keywords, &repo.tags) + }) { warn!("Failed to index repo in search: {}", e); } @@ -57,6 +48,33 @@ pub fn index_repo( Ok(()) } +/// Index a single repo with a standalone Tantivy writer. +/// Suitable for one-off indexing where writer reuse is not needed. +pub fn index_repo( + conn: &mut rusqlite::Connection, + repo: &crate::registry::RepoEntry, + config: Option<&crate::config::Config>, +) -> anyhow::Result<()> { + let (index, _reader) = crate::search::init_index()?; + let mut writer = crate::search::get_writer(&index)?; + let schema = index.schema(); + index_repo_core(conn, repo, config, &mut writer, &schema)?; + crate::search::commit_writer(&mut writer)?; + Ok(()) +} + +/// Index a single repo reusing an existing Tantivy writer. +/// Callers must commit the writer after the batch. +pub fn index_repo_with_writer( + conn: &mut rusqlite::Connection, + repo: &crate::registry::RepoEntry, + config: Option<&crate::config::Config>, + writer: &mut IndexWriter, + schema: &Schema, +) -> anyhow::Result<()> { + index_repo_core(conn, repo, config, writer, schema) +} + /// 兼容旧调用的包装层:执行索引逻辑 pub fn run_index( conn: &mut rusqlite::Connection, From 507fc44c8df53f70df336cba4f44ecf2f911adf1 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 22:05:46 +0800 Subject: [PATCH 04/17] docs: module rustdoc + ADR-004/005 + i18n dead_code clarification - Add rustdoc to 7 core modules: i18n, knowledge_engine, workflow, registry, storage, daemon, embedding - Clarify rationale in i18n/mod.rs - Create ADR-004: MCP Tool Layer Trait Decoupling - Create ADR-005: AppContext Clone for Async Context Propagation - Update ADR index with ADR-003/004/005 --- .../adr-004-mcp-trait-decoupling.md | 34 +++++++++++++++++++ docs/architecture/adr-005-appcontext-clone.md | 34 +++++++++++++++++++ docs/architecture/adr-template.md | 3 ++ src/daemon.rs | 7 ++++ src/embedding.rs | 8 +++-- src/i18n/mod.rs | 10 ++++++ src/knowledge_engine/mod.rs | 10 ++++++ src/registry.rs | 7 ++++ src/storage.rs | 8 +++++ src/workflow/mod.rs | 10 ++++++ 10 files changed, 129 insertions(+), 2 deletions(-) create mode 100644 docs/architecture/adr-004-mcp-trait-decoupling.md create mode 100644 docs/architecture/adr-005-appcontext-clone.md diff --git a/docs/architecture/adr-004-mcp-trait-decoupling.md b/docs/architecture/adr-004-mcp-trait-decoupling.md new file mode 100644 index 0000000..b35acec --- /dev/null +++ b/docs/architecture/adr-004-mcp-trait-decoupling.md @@ -0,0 +1,34 @@ +# ADR-004: MCP Tool Layer Trait Decoupling + +- **状态**: accepted +- **日期**: 2026-05-11 +- **作者**: devbase 架构优化会话 + +## 上下文 + +`src/mcp/tools/` 中的 MCP 工具实现直接内联调用 `crate::health::`、`crate::search::`、`crate::registry::` 等底层模块,导致: +- 工具层与业务层硬耦合,无法独立测试 +- `repo.rs` 等文件 `crate::` 内联引用超过 10 处,违反架构红线 +- 新增工具时容易引入隐式依赖 + +## 决策 + +为每个业务领域定义 trait(`ScanClient`、`HealthClient`、`RegistryClient`、`KnowledgeClient`、`SearchClient`、`RepoAnalyzer` 等),由 `AppContext` 统一实现,MCP 工具只依赖 trait。 + +## 后果 + +- **正面**: `repo.rs` `crate::` 引用从 11 降至 8(全部集中在 use 语句);工具层可独立单元测试;新增领域只需扩展 trait +- **负面**: trait 定义与实现分属不同文件,跳转成本略增;简单查询也需 trait 封装 +- **风险**: 过度抽象可能导致 trait 膨胀;需定期审查 trait 方法是否仍被使用 + +## 备选方案 + +| 方案 | 不选原因 | +|------|---------| +| 保持现状,仅清理 use 语句 | 未解决测试隔离问题 | +| 每个工具独立 service struct | 与现有 `AppContext` 模式冲突,引入更多类型 | + +## 相关决策 + +- 依赖:ADR-001(单 crate 模型使 trait 定义零成本) +- 被依赖:ADR-005(AppContext Clone 是 trait 在 spawn_blocking 中使用的前提) diff --git a/docs/architecture/adr-005-appcontext-clone.md b/docs/architecture/adr-005-appcontext-clone.md new file mode 100644 index 0000000..fe867e6 --- /dev/null +++ b/docs/architecture/adr-005-appcontext-clone.md @@ -0,0 +1,34 @@ +# ADR-005: AppContext Clone for Async Context Propagation + +- **状态**: accepted +- **日期**: 2026-05-11 +- **作者**: devbase 架构优化会话 + +## 上下文 + +MCP 工具频繁使用 `tokio::task::spawn_blocking` 执行 I/O 密集型操作(Tantivy 索引、SQLite 查询、文件系统遍历)。此前 `AppContext` 未实现 `Clone`,导致: +- 闭包内无法调用 `ctx.list_vault_notes()` 等 trait 方法 +- 被迫在 `spawn_blocking` 外获取 `conn` 再 move 进闭包,增加生命周期复杂度 +- `VaultClient`、`WorkflowClient` 等 trait 难以在异步上下文中使用 + +## 决策 + +将 `AppContext.env_cache` 从 `std::sync::Mutex` 改为 `Arc>`,并为 `AppContext` 添加 `#[derive(Clone)]`。 + +## 后果 + +- **正面**: `spawn_blocking` 闭包内可直接 `ctx.clone()` 后调用任意 trait 方法;统一 async/sync 边界处理模式 +- **负面**: `Clone` 后多个上下文共享同一 `Mutex`,并发修改 env_cache 的竞争概率微增(当前仅 daemon 定期刷新,可忽略) +- **风险**: 未来若向 `AppContext` 添加非 Clone 字段,需回退到显式字段 clone 模式 + +## 备选方案 + +| 方案 | 不选原因 | +|------|---------| +| 为每个 trait 定义无状态 Impl (ZST) | `RegistryClient` 等方法需 `conn`,无状态 impl 需传入 `Connection`,违反 T11 红线 | +| 使用 `Arc` 包装 | 增加一层间接,所有调用点需改为 `arc.ctx.method()`,改动面过大 | +| 将 `Pool` 单独 clone move 进闭包 | 已在使用,但无法调用 `DigestClient` 等需要 config/i18n 的 trait 方法 | + +## 相关决策 + +- 依赖:ADR-004(trait 化后,clone 成为 spawn_blocking 中使用 trait 的基础设施) diff --git a/docs/architecture/adr-template.md b/docs/architecture/adr-template.md index a10634e..a1f4f25 100644 --- a/docs/architecture/adr-template.md +++ b/docs/architecture/adr-template.md @@ -46,6 +46,9 @@ |------|------|------|------| | ADR-001 | 单 crate 模型(defer split)| accepted | 2026-04-26 | | ADR-002 | Candle CPU BERT 单条编码(batch 回滚)| accepted | 2026-05-04 | +| ADR-003 | Tantivy + SQLite 双写一致性策略 | proposed | 2026-05-11 | +| ADR-004 | MCP Tool Layer Trait Decoupling | accepted | 2026-05-11 | +| ADR-005 | AppContext Clone for Async Context Propagation | accepted | 2026-05-11 | ### ADR-001: 单 crate 模型(defer split) diff --git a/src/daemon.rs b/src/daemon.rs index 20891f6..039ffb7 100644 --- a/src/daemon.rs +++ b/src/daemon.rs @@ -1,5 +1,12 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +//! Background daemon: periodic health checks, re-indexing, discovery, +//! daily digest generation, and relation graph maintenance. +//! +//! The daemon runs on a configurable schedule (`daemon.interval_seconds`) +//! and uses `tokio::spawn_blocking` for CPU- or I/O-heavy tasks to avoid +//! blocking the async runtime. + use crate::config::Config; use crate::digest::generate_daily_digest; use crate::discovery_engine::{discover_dependencies, discover_similar_projects}; diff --git a/src/embedding.rs b/src/embedding.rs index cc0d6d3..7472121 100644 --- a/src/embedding.rs +++ b/src/embedding.rs @@ -1,7 +1,11 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 -// RE-EXPORT ONLY — 实现已迁移至 devbase-embedding crate. -// 禁止在本文件中添加新代码。 +//! Embedding generation: local Candle-based BERT inference for code symbols. +//! +//! **Re-export only** — implementation lives in the `devbase-embedding` crate. +//! Do not add new code here; extend the extracted crate instead. +//! +//! Feature-gated behind `embedding`; disabled by default to reduce binary size. #[cfg(feature = "embedding")] pub use devbase_embedding::*; diff --git a/src/i18n/mod.rs b/src/i18n/mod.rs index f7e6c8b..654d819 100644 --- a/src/i18n/mod.rs +++ b/src/i18n/mod.rs @@ -1,5 +1,15 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +//! Internationalization (i18n) layer for devbase. +//! +//! Provides language-specific UI strings for TUI, CLI, sync reports, and logs. +//! Supported languages: English (`en`) and Simplified Chinese (`zh_cn`). +//! +//! **Note on `#[allow(dead_code)]`**: Many string fields are accessed only when +//! the `tui` feature is enabled. Without this attribute, compiling without +//! `--features tui` would produce spurious dead-code warnings. The fields are +//! actively used in production builds with the default feature set. + #[derive(Clone, Copy)] #[allow(dead_code)] pub struct I18n { diff --git a/src/knowledge_engine/mod.rs b/src/knowledge_engine/mod.rs index f482e79..c8418ec 100644 --- a/src/knowledge_engine/mod.rs +++ b/src/knowledge_engine/mod.rs @@ -1,5 +1,15 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +//! Knowledge engine: repository indexing, summary extraction, and module analysis. +//! +//! Orchestrates Tantivy full-text indexing, SQLite registry persistence, +//! semantic code indexing (AST + call graph), and optional embedding generation. +//! +//! Entry points: +//! - [`run_index`] — batch index all registered repos or a single path +//! - [`index_repo`] — index a single repo (standalone writer) +//! - [`index_repo_with_writer`] — index a single repo reusing an existing writer + pub mod fallback; pub mod index; pub mod index_state; diff --git a/src/registry.rs b/src/registry.rs index 1ffaccc..d4967cb 100644 --- a/src/registry.rs +++ b/src/registry.rs @@ -1,5 +1,12 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +//! Registry layer: SQLite-backed entity storage and domain-specific submodules. +//! +//! Central types (`RepoEntry`, `VaultNote`, `PaperEntry`, etc.) and the +//! [`RegistryClient`] trait implementation on [`AppContext`]. +//! Submodules cover repos, health, knowledge, code metrics, call graphs, +//! dead-code analysis, and migrations. + use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::path::PathBuf; diff --git a/src/storage.rs b/src/storage.rs index 4f14713..db90611 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -1,5 +1,13 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +//! Storage abstraction and application context (`AppContext`). +//! +//! [`StorageBackend`] decouples concrete paths from consumers, enabling +//! test isolation via [`TempStorageBackend`] and future remote backends. +//! [`AppContext`] is the central dependency-injection container: it holds +//! the storage backend, database pool, config, i18n, and environment cache, +//! and implements all MCP client traits (`ScanClient`, `HealthClient`, etc.). + use crate::config::Config; use crate::i18n::{I18n, from_language}; use crate::registry::{ENTITY_TYPE_REPO, WorkspaceRegistry}; diff --git a/src/workflow/mod.rs b/src/workflow/mod.rs index eb534bc..be45150 100644 --- a/src/workflow/mod.rs +++ b/src/workflow/mod.rs @@ -1,5 +1,15 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +//! Workflow automation engine: YAML-defined multi-step pipelines with parallel +//! execution, variable interpolation, and SQLite-backed execution tracking. +//! +//! A workflow consists of ordered steps, each referencing a registered Skill. +//! The scheduler builds independent batches; the executor runs each batch in +//! parallel while preserving step ordering across batches. +//! +//! Key traits: +//! - [`WorkflowClient`] — MCP-facing API for listing, running, and querying workflows + pub mod executor; pub mod interpolate; pub mod model; From a3fef4cef999698f27dcf1ef7267faa359ef92c5 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 22:28:21 +0800 Subject: [PATCH 05/17] docs: update project facade files (README ecosystem) - CONTRIBUTING.md: update health metrics to v0.15.0 / 427 tests / AGPL-3.0+ - SECURITY.md: update supported version to 0.15.x - Create CODE_OF_CONDUCT.md (Contributor Covenant v2.0) - Create SUPPORT.md (docs, issues, discussions, commercial support) - Create .github/PULL_REQUEST_TEMPLATE.md with checklist - Create .github/ISSUE_TEMPLATE/bug_report.md - Create .github/ISSUE_TEMPLATE/feature_request.md - Create .github/ISSUE_TEMPLATE/config.yml (disable blank issues) --- .github/ISSUE_TEMPLATE/bug_report.md | 31 +++++++++++++ .github/ISSUE_TEMPLATE/config.yml | 8 ++++ .github/ISSUE_TEMPLATE/feature_request.md | 23 ++++++++++ .github/PULL_REQUEST_TEMPLATE.md | 26 +++++++++++ CODE_OF_CONDUCT.md | 54 +++++++++++++++++++++++ CONTRIBUTING.md | 6 +-- SECURITY.md | 4 +- SUPPORT.md | 34 ++++++++++++++ 8 files changed, 181 insertions(+), 5 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 CODE_OF_CONDUCT.md create mode 100644 SUPPORT.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..4f5580a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,31 @@ +--- +name: Bug report +about: Create a report to help us improve devbase +title: '[BUG] ' +labels: bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Run `...` +2. Click on '...' +3. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Environment (please complete the following information):** + - OS: [e.g. Windows 11, macOS 14, Ubuntu 22.04] + - devbase version: [output of `devbase --version`] + - Rust version: [output of `rustc --version`] + +**Screenshots / Logs** +If applicable, add screenshots or console output to help explain your problem. + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..96f6514 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: false +contact_links: + - name: Security vulnerability + url: https://github.com/juice094/devbase/security/advisories/new + about: Please report security issues privately via GitHub Security Advisories. + - name: Question or discussion + url: https://github.com/juice094/devbase/discussions + about: For Q&A, architecture debates, or show-and-tell, use GitHub Discussions. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..9ebf1ca --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,23 @@ +--- +name: Feature request +about: Suggest an idea for devbase +title: '[Feature] ' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Use case** +Who would benefit from this feature? How would they use it? + +**Additional context** +Add any other context, mockups, or references about the feature request here. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..6c7a08c --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,26 @@ +## Summary + + + +## Type of Change + +- [ ] Bug fix (non-breaking) +- [ ] New feature +- [ ] Breaking change +- [ ] Documentation +- [ ] Performance improvement +- [ ] Refactoring (no behavior change) + +## Checklist + +- [ ] `cargo test --all-targets` passes locally +- [ ] `cargo clippy --all-targets -D warnings` passes +- [ ] `cargo fmt --check` passes +- [ ] New code has no production `unwrap`/`expect`/`panic` (test code exempt) +- [ ] Schema changes include migration in `src/registry/migrate.rs` +- [ ] New MCP tools include tests in `src/mcp/tests.rs` +- [ ] README / AGENTS.md updated if user-facing behavior changed + +## Related Issues + + diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..ccd1b92 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,54 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +## Our Standards + +Examples of behavior that contributes to a positive environment: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes + +Examples of unacceptable behavior: + +- The use of sexualized language or imagery, and sexual attention or advances +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information without explicit permission + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +**juice094@protonmail.com**. + +All complaints will be reviewed and investigated promptly and fairly. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +[homepage]: https://www.contributor-covenant.org diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2c4d81a..e4e0cba 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,11 +8,11 @@ | 指标 | 状态 | |:---|:---| -| 版本 | v0.8.0 | -| 测试 | 267 passed / 0 failed / 3 ignored | +| 版本 | v0.15.0 | +| 测试 | 427 passed / 0 failed / 3 ignored | | Clippy | `-D warnings` 全绿 | | 生产代码 unwrap | 0 | -| 许可证 | MIT | +| 许可证 | AGPL-3.0-or-later | --- diff --git a/SECURITY.md b/SECURITY.md index c4b61b7..113506a 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -6,8 +6,8 @@ The following versions of devbase currently receive security updates: | Version | Supported | | ------- | ------------------ | -| 0.14.x | :white_check_mark: | -| < 0.14 | :x: | +| 0.15.x | :white_check_mark: | +| < 0.15 | :x: | ## Reporting a Vulnerability diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 0000000..159f358 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,34 @@ +# Getting Help with devbase + +## Documentation + +- **User Guide**: See [`README.md`](./README.md) for installation, quick start, and feature overview +- **Architecture**: See [`ARCHITECTURE.md`](./ARCHITECTURE.md) for technical design and module boundaries +- **Agent Guidelines**: See [`AGENTS.md`](./AGENTS.md) for MCP tool conventions and schema migration rules +- **Contributing**: See [`CONTRIBUTING.md`](./CONTRIBUTING.md) for build instructions and PR checklist + +## Bug Reports & Feature Requests + +- **Bug Report**: [Open an Issue](https://github.com/juice094/devbase/issues/new) — include devbase version (`devbase --version`), OS, and minimal reproduction steps +- **Feature Request**: [Open an Issue](https://github.com/juice094/devbase/issues/new) with prefix `[Feature]` — describe the use case, not just the solution +- **Security Issue**: See [`SECURITY.md`](./SECURITY.md) for responsible disclosure policy + +## Community + +- **Discussions**: Use [GitHub Discussions](https://github.com/juice094/devbase/discussions) for Q&A, show-and-tell, and architecture debates +- **Issue Tracker**: [GitHub Issues](https://github.com/juice094/devbase/issues) for confirmed bugs and accepted feature requests + +## Commercial Support + +For enterprise deployment, custom integrations, or closed-source licensing inquiries: + +- **Email**: juice094@protonmail.com +- **Subject**: `[devbase Commercial]` + +## Response Time + +| Type | Target Response | +|:---|:---| +| Security vulnerability | 72 hours (see SECURITY.md) | +| Critical bug (crash / data loss) | 7 days | +| Feature request / general question | 14 days | From 3b7446b91a592015a3c13925830e9f7c85d48c61 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 23:34:24 +0800 Subject: [PATCH 06/17] =?UTF-8?q?test(knowledge=5Fengine):=20=E8=A1=A5?= =?UTF-8?q?=E9=BD=90=20index.rs=20=E4=B8=8E=20index=5Fstate.rs=20=E5=8D=95?= =?UTF-8?q?=E5=85=83=E6=B5=8B=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 修复 SCHEMA_DDL 缺失 repo_index_state 表(测试基础设施) - 将 registry/test_helpers 可见性调整为 pub(跨模块测试共享) - index.rs: 覆盖 prepare_repos (4 场景) + save_repo_index_state (1 场景) - index_state.rs: 覆盖 get_repo_index_state (Missing/Fresh/Stale/Unknown) 与 IndexState 行为方法 (is_fresh, changed_files_count) - 全部 22 项新增测试本地通过,Clippy 零警告,fmt 已检查 --- src/knowledge_engine/index.rs | 120 +++++++++++++++++ src/knowledge_engine/index_state.rs | 196 ++++++++++++++++++++++++++++ src/registry.rs | 2 +- src/registry/test_helpers.rs | 7 + 4 files changed, 324 insertions(+), 1 deletion(-) diff --git a/src/knowledge_engine/index.rs b/src/knowledge_engine/index.rs index 21183b9..db67cd3 100644 --- a/src/knowledge_engine/index.rs +++ b/src/knowledge_engine/index.rs @@ -473,3 +473,123 @@ fn save_repo_index_state( )?; Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + use crate::registry::RepoEntry; + use crate::registry::test_helpers::WorkspaceRegistry; + use std::path::Path; + + fn init_git_repo(path: &Path) -> git2::Repository { + let repo = git2::Repository::init(path).unwrap(); + let mut config = repo.config().unwrap(); + config.set_str("user.name", "Test").unwrap(); + config.set_str("user.email", "test@example.com").unwrap(); + let sig = repo.signature().unwrap(); + let tree_id = { + let mut index = repo.index().unwrap(); + index.write_tree().unwrap() + }; + let tree = repo.find_tree(tree_id).unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[]).unwrap(); + drop(tree); + repo + } + + #[test] + fn test_prepare_repos_empty_path_returns_all() -> anyhow::Result<()> { + let mut conn = WorkspaceRegistry::init_in_memory()?; + let _ = WorkspaceRegistry::seed_test_repo(&mut conn, "repo1")?; + let _ = WorkspaceRegistry::seed_test_repo(&mut conn, "repo2")?; + + let repos = prepare_repos(&mut conn, "")?; + assert_eq!(repos.len(), 2); + assert!(repos.iter().any(|r| r.id == "repo1")); + assert!(repos.iter().any(|r| r.id == "repo2")); + Ok(()) + } + + #[test] + fn test_prepare_repos_matching_path_returns_one() -> anyhow::Result<()> { + let mut conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("myrepo"); + std::fs::create_dir(&path)?; + + let repo = RepoEntry { + id: "myrepo".to_string(), + local_path: path.clone(), + tags: vec![], + language: Some("rust".to_string()), + discovered_at: chrono::Utc::now(), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + crate::registry::repo::save_repo(&mut conn, &repo)?; + + let repos = prepare_repos(&mut conn, path.to_str().unwrap())?; + assert_eq!(repos.len(), 1); + assert_eq!(repos[0].id, "myrepo"); + Ok(()) + } + + #[test] + fn test_prepare_repos_nonexistent_path_errors() -> anyhow::Result<()> { + let mut conn = WorkspaceRegistry::init_in_memory()?; + let result = prepare_repos(&mut conn, "/nonexistent/path/12345"); + assert!(result.is_err()); + Ok(()) + } + + #[test] + fn test_prepare_repos_unregistered_existing_path_auto_registers() -> anyhow::Result<()> { + let mut conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("unregistered"); + std::fs::create_dir(&path)?; + let _ = init_git_repo(&path); + + let repos = prepare_repos(&mut conn, path.to_str().unwrap())?; + assert_eq!(repos.len(), 1); + assert_eq!(repos[0].local_path, path); + // Verify it was saved to registry + let all = crate::registry::repo::list_repos(&conn)?; + assert_eq!(all.len(), 1); + Ok(()) + } + + #[test] + fn test_save_and_get_repo_index_state() -> anyhow::Result<()> { + let mut conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("gitrepo"); + std::fs::create_dir(&path)?; + let repo = git2::Repository::init(&path)?; + let mut config = repo.config().unwrap(); + config.set_str("user.name", "Test").unwrap(); + config.set_str("user.email", "test@example.com").unwrap(); + let sig = repo.signature().unwrap(); + let tree_id = { + let mut index = repo.index().unwrap(); + index.write_tree().unwrap() + }; + let tree = repo.find_tree(tree_id).unwrap(); + let oid = repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])?; + + save_repo_index_state(&mut conn, "test-repo", &oid.to_string())?; + + let hash: Option = conn + .query_row( + "SELECT last_commit_hash FROM repo_index_state WHERE repo_id = ?1", + ["test-repo"], + |row| row.get(0), + ) + .unwrap_or(None); + assert_eq!(hash, Some(oid.to_string())); + Ok(()) + } +} diff --git a/src/knowledge_engine/index_state.rs b/src/knowledge_engine/index_state.rs index c17686e..4c1edea 100644 --- a/src/knowledge_engine/index_state.rs +++ b/src/knowledge_engine/index_state.rs @@ -140,4 +140,200 @@ mod tests { assert!(json.contains("\"state\":\"unknown\"")); assert!(json.contains("\"reason\":\"x\"")); } + + #[test] + fn test_index_state_is_fresh_and_changed_count() { + assert!(IndexState::Fresh.is_fresh()); + assert!(!IndexState::Missing.is_fresh()); + assert!(!IndexState::Unknown { reason: "err".into() }.is_fresh()); + + let stale = IndexState::Stale { + added: vec!["a.rs".into()], + modified: vec!["b.rs".into(), "c.rs".into()], + deleted: vec![], + }; + assert_eq!(stale.changed_files_count(), 3); + assert_eq!(IndexState::Fresh.changed_files_count(), 0); + assert_eq!(IndexState::Missing.changed_files_count(), 0); + } + + #[test] + fn test_get_repo_index_state_missing() -> anyhow::Result<()> { + use crate::registry::test_helpers::WorkspaceRegistry; + let conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("repo"); + std::fs::create_dir(&path)?; + let repo = git2::Repository::init(&path)?; + let mut config = repo.config().unwrap(); + config.set_str("user.name", "Test").unwrap(); + config.set_str("user.email", "test@example.com").unwrap(); + let sig = repo.signature().unwrap(); + let tree_id = { + let mut index = repo.index().unwrap(); + index.write_tree().unwrap() + }; + let tree = repo.find_tree(tree_id).unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])?; + + let entry = RepoEntry { + id: "missing-repo".to_string(), + local_path: path, + tags: vec![], + language: Some("rust".to_string()), + discovered_at: chrono::Utc::now(), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + + let state = get_repo_index_state(&conn, &entry); + assert!(matches!(state, IndexState::Missing)); + Ok(()) + } + + #[test] + fn test_get_repo_index_state_fresh() -> anyhow::Result<()> { + use crate::registry::test_helpers::WorkspaceRegistry; + let conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("repo"); + std::fs::create_dir(&path)?; + let repo = git2::Repository::init(&path)?; + let mut config = repo.config().unwrap(); + config.set_str("user.name", "Test").unwrap(); + config.set_str("user.email", "test@example.com").unwrap(); + let sig = repo.signature().unwrap(); + let tree_id = { + let mut index = repo.index().unwrap(); + index.write_tree().unwrap() + }; + let tree = repo.find_tree(tree_id).unwrap(); + let oid = repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])?; + + conn.execute( + "INSERT INTO repo_index_state (repo_id, last_commit_hash, indexed_at) VALUES (?1, ?2, datetime('now'))", + ["fresh-repo", &oid.to_string()], + )?; + + let entry = RepoEntry { + id: "fresh-repo".to_string(), + local_path: path, + tags: vec![], + language: Some("rust".to_string()), + discovered_at: chrono::Utc::now(), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + + let state = get_repo_index_state(&conn, &entry); + assert!(matches!(state, IndexState::Fresh)); + Ok(()) + } + + #[test] + fn test_get_repo_index_state_stale() -> anyhow::Result<()> { + use crate::registry::test_helpers::WorkspaceRegistry; + let conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("repo"); + std::fs::create_dir(&path)?; + let repo = git2::Repository::init(&path)?; + let mut config = repo.config().unwrap(); + config.set_str("user.name", "Test").unwrap(); + config.set_str("user.email", "test@example.com").unwrap(); + let sig = repo.signature().unwrap(); + + // First commit + let tree_id = { + let mut index = repo.index().unwrap(); + index.write_tree().unwrap() + }; + let tree = repo.find_tree(tree_id).unwrap(); + let old_oid = repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])?; + drop(tree); + + // Save the first commit hash as last indexed + conn.execute( + "INSERT INTO repo_index_state (repo_id, last_commit_hash, indexed_at) VALUES (?1, ?2, datetime('now'))", + ["stale-repo", &old_oid.to_string()], + )?; + + // Second commit so HEAD moves forward and diff_since detects changes + let tree_id2 = { + let mut index = repo.index().unwrap(); + // Add a dummy file to create a new tree + let blob_oid = repo.blob(b"hello")?; + index.add_frombuffer( + &git2::IndexEntry { + ctime: git2::IndexTime::new(0, 0), + mtime: git2::IndexTime::new(0, 0), + dev: 0, + ino: 0, + mode: 0o100644, + uid: 0, + gid: 0, + file_size: 5, + id: blob_oid, + flags: 0, + flags_extended: 0, + path: b"file.txt".to_vec(), + }, + b"hello", + )?; + index.write_tree().unwrap() + }; + let parent = repo.find_commit(old_oid)?; + let tree2 = repo.find_tree(tree_id2).unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, "second", &tree2, &[&parent])?; + drop(tree2); + + let entry = RepoEntry { + id: "stale-repo".to_string(), + local_path: path, + tags: vec![], + language: Some("rust".to_string()), + discovered_at: chrono::Utc::now(), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + + let state = get_repo_index_state(&conn, &entry); + assert!(matches!(state, IndexState::Stale { .. }), "expected Stale, got {:?}", state); + Ok(()) + } + + #[test] + fn test_get_repo_index_state_unknown_not_git() -> anyhow::Result<()> { + use crate::registry::test_helpers::WorkspaceRegistry; + let conn = WorkspaceRegistry::init_in_memory()?; + let tmp = tempfile::tempdir()?; + let path = tmp.path().join("not-git"); + std::fs::create_dir(&path)?; + + let entry = RepoEntry { + id: "unknown-repo".to_string(), + local_path: path, + tags: vec![], + language: Some("rust".to_string()), + discovered_at: chrono::Utc::now(), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + + let state = get_repo_index_state(&conn, &entry); + assert!(matches!(state, IndexState::Unknown { .. }), "expected Unknown, got {:?}", state); + Ok(()) + } } diff --git a/src/registry.rs b/src/registry.rs index d4967cb..971e610 100644 --- a/src/registry.rs +++ b/src/registry.rs @@ -517,7 +517,7 @@ impl crate::clients::RegistryClient for crate::storage::AppContext { } #[cfg(test)] -mod test_helpers; +pub mod test_helpers; #[cfg(test)] mod tests; diff --git a/src/registry/test_helpers.rs b/src/registry/test_helpers.rs index bddce67..ec57e55 100644 --- a/src/registry/test_helpers.rs +++ b/src/registry/test_helpers.rs @@ -1,5 +1,6 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 +pub use super::WorkspaceRegistry; use super::*; #[cfg(test)] @@ -355,6 +356,12 @@ CREATE TABLE IF NOT EXISTS orphan_tantivy_docs ( repo_id TEXT PRIMARY KEY, detected_at DATETIME DEFAULT current_timestamp ); + +CREATE TABLE IF NOT EXISTS repo_index_state ( + repo_id TEXT PRIMARY KEY, + last_commit_hash TEXT, + indexed_at DATETIME DEFAULT current_timestamp +); "#; #[cfg(test)] From 69eb364a74fe36bfe13db44e1fb5fa153162073c Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Mon, 11 May 2026 23:34:57 +0800 Subject: [PATCH 07/17] chore: ignore coverage_report.txt --- .gitignore | Bin 734 -> 754 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/.gitignore b/.gitignore index f35903d90ee5327bde64da673356e3d95e901948..5539271297c5568a403de0f80f7fdae064c8e9cd 100644 GIT binary patch delta 28 jcmcb|`iXVJJtmRl{Ib-d#PrnoqSS)?q7uE5iV`jWu4D^d delta 7 OcmeywdXII(JthDS`vYqL From 195f648a8df5c422057637bc99d22e2cbba32916 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 09:51:47 +0800 Subject: [PATCH 08/17] =?UTF-8?q?ci:=20upgrade=20actions/checkout=20v4=20?= =?UTF-8?q?=E2=86=92=20v6=20(Node.js=2024)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit GitHub Actions 将在 2026-09-16 移除 Node.js 20 支持。 actions/checkout@v6 基于 Node.js 24,消除 CI deprecation 警告。 - ci.yml: 6 处 checkout@v4 → v6 - release.yml: 1 处 checkout@v4 → v6 --- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/release.yml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bfa6650..68f9185 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: name: Check runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 with: @@ -37,7 +37,7 @@ jobs: name: Test runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 with: @@ -59,7 +59,7 @@ jobs: name: Rustfmt runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable with: components: rustfmt @@ -69,7 +69,7 @@ jobs: name: Clippy runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable with: components: clippy @@ -94,7 +94,7 @@ jobs: name: Security Audit runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 with: @@ -107,7 +107,7 @@ jobs: name: Architecture Invariants runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 with: fetch-depth: 0 - name: Run invariant checks diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 01da81a..f9b3115 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,7 +16,7 @@ jobs: name: Build Windows Release runs-on: windows-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 with: From fabba969e9e07585d8f2387eafc695a5e0a0e8b8 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 10:36:56 +0800 Subject: [PATCH 09/17] =?UTF-8?q?test(vault):=20=E8=A1=A5=E9=BD=90=20index?= =?UTF-8?q?er.rs=20=E5=8D=95=E5=85=83=E6=B5=8B=E8=AF=95=EF=BC=8C=E4=BF=AE?= =?UTF-8?q?=E5=A4=8D=20id=20field=20=E5=88=86=E8=AF=8D=E5=AF=BC=E8=87=B4?= =?UTF-8?q?=E5=88=A0=E9=99=A4=E5=A4=B1=E6=95=88?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## 变更 - vault/indexer.rs: 提取 / , 解除对全局 的测试耦合 - 新增 4 项隔离测试(临时 Tantivy 索引,避免 writer 锁冲突): - : 空 notes 时清除旧 vault docs - : 从文件系统读取内容并索引 - : 单条新增 - : 单条更新(delete + add) - search.rs: uid=197609(22414) gid=197609 groups=197609 field 从 → (精确匹配) - 修复生产缺陷: 分词导致 对含连字符/空格的 ID 无法匹配,实际不生效 - Schema mismatch 时 会自动重建索引 ## 验证 - running 425 tests test arxiv::tests::test_parse_arxiv_atom_invalid_xml ... ok test arxiv::tests::test_parse_arxiv_atom_missing_title ... ok test arxiv::tests::test_parse_arxiv_atom_no_authors ... ok test arxiv::tests::test_parse_arxiv_atom_success ... ok test asyncgit::tests::test_async_notification_variants ... ok test asyncgit::tests::test_async_repo_status_clone ... ok test asyncgit::tests::test_async_single_job_new ... ok test asyncgit::tests::test_repo_status_notification_clone ... ok test asyncgit::tests::test_sync_progress_notification_clone ... ok test backup::tests::test_backup_filename_contains_timestamp ... ok test backup::tests::test_backup_filename_format ... ok test backup::tests::test_clean_old_backups_removes_oldest ... ok test backup::tests::test_export_sqlite_creates_file ... ok test config::tests::test_config_custom_values ... ok test config::tests::test_config_default ... ok test config::tests::test_config_empty_uses_defaults ... ok test config::tests::test_config_serialize_roundtrip ... ok test daemon::tests::test_daemon_new ... ok test dependency_graph::tests::test_parse_cargo_toml_deps ... ok test dependency_graph::tests::test_parse_cmake_add_subdirectory_local ... ok test dependency_graph::tests::test_parse_cmake_fetchcontent_declare ... ok test dependency_graph::tests::test_parse_cmake_find_package ... ok test dependency_graph::tests::test_parse_cmake_target_link_libraries ... ok test dependency_graph::tests::test_parse_go_mod_deps ... ok test dependency_graph::tests::test_parse_package_json_deps ... ok test dependency_graph::tests::test_parse_pyproject_toml_deps ... ok test dependency_graph::tests::test_parse_requirements_txt_deps ... ok test digest::tests::test_generate_daily_digest_empty ... ok test digest::tests::test_generate_daily_digest_with_repos ... ok test digest::tests::test_generate_daily_digest_with_unhealthy_repo ... ok test discovery_engine::tests::test_discover_dependencies_cargo ... ok test discovery_engine::tests::test_discover_dependencies_no_manifest ... ok test discovery_engine::tests::test_normalize_dep_name ... ok test health::tests::test_compute_workspace_hash_changes_with_content ... ok test health::tests::test_compute_workspace_hash_empty_dir ... ok test health::tests::test_compute_workspace_hash_ignores_dirs ... ok test health::tests::test_fmt_version_bun ... ok test health::tests::test_fmt_version_cargo ... ok test health::tests::test_fmt_version_cmake ... ok test health::tests::test_fmt_version_docker ... ok test health::tests::test_fmt_version_go ... ok test health::tests::test_fmt_version_java ... ok test health::tests::test_fmt_version_python ... ok test health::tests::test_fmt_version_rustc ... ok test health::tests::test_fmt_version_single_word ... ok test health::tests::test_fmt_version_unknown ... ok test i18n::en::tests::test_build ... ok test i18n::tests::test_en_build ... ok test i18n::tests::test_format_template_basic ... ok test i18n::tests::test_format_template_extra_args_ignored ... ok test i18n::tests::test_format_template_multiple ... ok test i18n::tests::test_format_template_no_placeholder ... ok test i18n::tests::test_from_language_en ... ok test i18n::tests::test_log_strings_loaded_repos ... ok test i18n::tests::test_log_strings_status_fmt ... ok test i18n::tests::test_zh_build ... ok test i18n::zh_cn::tests::test_build ... ok test knowledge_engine::index::tests::test_prepare_repos_empty_path_returns_all ... ok test knowledge_engine::index::tests::test_prepare_repos_matching_path_returns_one ... ok test knowledge_engine::index::tests::test_prepare_repos_nonexistent_path_errors ... ok test knowledge_engine::index::tests::test_prepare_repos_unregistered_existing_path_auto_registers ... ok test knowledge_engine::index::tests::test_save_and_get_repo_index_state ... ok test knowledge_engine::index_state::tests::test_get_repo_index_state_fresh ... ok test knowledge_engine::index_state::tests::test_get_repo_index_state_missing ... ok test knowledge_engine::index_state::tests::test_get_repo_index_state_stale ... ok test knowledge_engine::index_state::tests::test_get_repo_index_state_unknown_not_git ... ok test knowledge_engine::index_state::tests::test_index_state_is_fresh_and_changed_count ... ok test knowledge_engine::index_state::tests::test_index_state_variants_serialize ... ok test knowledge_engine::readme::tests::test_build_llm_prompt_contains_json_instruction ... ok test knowledge_engine::readme::tests::test_extract_module_structure_for_devbase ... ok test knowledge_engine::readme::tests::test_extract_module_structure_non_rust ... ok test knowledge_engine::readme::tests::test_extract_readme_summary_basic ... ok test knowledge_engine::readme::tests::test_extract_readme_summary_truncates_at_sentence ... ok test knowledge_engine::readme::tests::test_extract_readme_summary_with_badges ... ok test knowledge_engine::readme::tests::test_fallback_summary_cargo_toml ... ok test knowledge_engine::readme::tests::test_module_info_clone ... ok test knowledge_engine::readme::tests::test_parse_llm_json_markdown_fenced ... ok test knowledge_engine::readme::tests::test_parse_llm_json_valid ... ok test knowledge_engine::readme::tests::test_real_gitui_repo ... ignored, integration test on real gitui repo test knowledge_engine::readme::tests::test_real_syncthing_repo ... ignored, integration test on real syncthing repo test knowledge_engine::readme::tests::test_try_llm_summary_disabled_returns_none ... ok test mcp::tests::test_destructive_gate_disabled_by_default ... ok test mcp::tests::test_destructive_gate_enabled ... ok test mcp::tests::test_format_mcp_message ... ok test mcp::tests::test_initialize ... ok test mcp::tests::test_nl_filter_repos_empty_query_returns_empty ... ok test mcp::tests::test_nl_filter_repos_fallback_finds_by_language ... ok test mcp::tests::test_nl_filter_repos_tantivy_finds_devbase ... ok test mcp::tests::test_parse_tool_tiers ... ok test mcp::tests::test_parse_tool_tiers_empty ... ok test mcp::tests::test_stdio_content_length_format ... ok test mcp::tests::test_tools_call_devkit_arxiv_fetch ... ok test mcp::tests::test_tools_call_devkit_health ... ok test mcp::tests::test_tools_call_devkit_project_context ... ok test mcp::tests::test_tools_call_devkit_query ... ok test mcp::tests::test_tools_call_devkit_skill_discover ... ok test mcp::tests::test_tools_call_devkit_skill_list ... ok test mcp::tests::test_tools_call_devkit_skill_run ... ignored, requires knowledge-report skill installed and may run external Python process test mcp::tests::test_tools_call_devkit_skill_search ... ok test mcp::tests::test_tools_call_unknown_tool ... ok test mcp::tests::test_tools_list ... ok test mcp::tests::test_unknown_method ... ok test mcp::tools::context::tests::test_collect_hot_files_basic ... ok test mcp::tools::context::tests::test_collect_hot_files_empty_repo ... ok test mcp::tools::context::tests::test_collect_hot_files_no_git ... ok test mcp::tools::context::tests::test_collect_recent_commits_basic ... ok test mcp::tools::context::tests::test_collect_recent_commits_empty_repo ... ok test mcp::tools::context::tests::test_collect_recent_commits_limit ... ok test mcp::tools::context::tests::test_name ... ok test mcp::tools::context::tests::test_schema_is_object ... ok test mcp::tools::known_limit::tests::test_name ... ok test mcp::tools::known_limit::tests::test_schema_is_object ... ok test mcp::tools::oplog::tests::test_name ... ok test mcp::tools::oplog::tests::test_schema_is_object ... ok test mcp::tools::query::tests::test_name ... ok test mcp::tools::query::tests::test_schema_is_object ... ok test mcp::tools::relations::tests::test_relation_query_bidirectional ... ok test mcp::tools::relations::tests::test_relation_store_and_query_roundtrip ... ok test mcp::tools::relations::tests::test_relation_store_missing_required_fields ... ok test mcp::tools::repo::tests::test_extract_tag_from_query ... ok test mcp::tools::repo::tests::test_parse_github_repo_https ... ok test mcp::tools::repo::tests::test_parse_github_repo_invalid ... ok test mcp::tools::repo::tests::test_parse_github_repo_ssh ... ok test mcp::tools::repo::tests::test_parse_stars_condition ... ok test mcp::tools::search::tests::test_parse_f32_array ... ok test mcp::tools::skill::tests::test_name ... ok test mcp::tools::skill::tests::test_schema_is_object ... ok test mcp::tools::tests::test_tool_modules_compile ... ok test mcp::tools::vault::tests::test_resolve_vault_path_absolute_blocked ... ok test mcp::tools::vault::tests::test_resolve_vault_path_dotdot_within_bounds ... ok test mcp::tools::vault::tests::test_resolve_vault_path_empty ... ok test mcp::tools::vault::tests::test_resolve_vault_path_nested ... ok test mcp::tools::vault::tests::test_resolve_vault_path_normal ... ok test mcp::tools::vault::tests::test_resolve_vault_path_performance ... ok test mcp::tools::vault::tests::test_resolve_vault_path_traversal_blocked ... ok test mcp::tools::vault::tests::test_resolve_vault_path_with_dot ... ok test mcp::tools::vault::tests::test_vault_daily_appends_to_existing ... ok test mcp::tools::vault::tests::test_vault_daily_creates_file ... ok test mcp::tools::vault::tests::test_vault_graph_basic ... ok test mcp::tools::vault::tests::test_vault_graph_filtered_by_repo ... ok test mcp::tools::workflow::tests::test_workflow_list_empty_registry ... ok test mcp::tools::workflow::tests::test_workflow_run_not_found ... ok test mcp::tools::workflow::tests::test_workflow_status_invalid_id ... ok test oplog_analytics::tests::test_generate_report_empty_db ... ok test oplog_analytics::tests::test_generate_report_with_data ... ok test query::tests::test_eval_behind_match ... ok test query::tests::test_eval_behind_no_match ... ok test query::tests::test_eval_keyword_match ... ok test query::tests::test_eval_keyword_no_match ... ok test query::tests::test_eval_note_match ... ok test query::tests::test_eval_note_no_match ... ok test query::tests::test_eval_stale_never_synced ... ok test query::tests::test_eval_tag_match ... ok test query::tests::test_eval_tag_no_match ... ok test query::tests::test_parse_cmp_expr_empty ... ok test query::tests::test_parse_cmp_expr_eq_implicit ... ok test query::tests::test_parse_cmp_expr_gt ... ok test query::tests::test_parse_query_behind ... ok test query::tests::test_parse_query_keyword ... ok test query::tests::test_parse_query_lang ... ok test query::tests::test_parse_query_multiple ... ok test query::tests::test_parse_query_note ... ok test query::tests::test_parse_query_stale ... ok test query::tests::test_parse_query_tag ... ok test registry::code_symbols::tests::test_query_code_symbols_by_file ... ok test registry::code_symbols::tests::test_query_code_symbols_by_name ... ok test registry::code_symbols::tests::test_query_code_symbols_by_symbol_type ... ok test registry::code_symbols::tests::test_query_code_symbols_limit ... ok test registry::code_symbols::tests::test_query_code_symbols_no_filter ... ok test registry::dead_code::tests::test_query_dead_code_basic ... ok test registry::dead_code::tests::test_query_dead_code_excludes_called ... ok test registry::dead_code::tests::test_query_dead_code_excludes_main ... ok test registry::dead_code::tests::test_query_dead_code_excludes_pub_when_not_include_pub ... ok test registry::dead_code::tests::test_query_dead_code_excludes_test_attribute ... ok test registry::dead_code::tests::test_query_dead_code_excludes_test_prefix ... ok test registry::dead_code::tests::test_query_dead_code_excludes_tests_rs ... ok test registry::knowledge::tests::test_cross_repo_search_symbols ... ok test registry::knowledge::tests::test_find_papers_by_venue ... ok test registry::knowledge::tests::test_find_related_symbols ... ok test registry::knowledge::tests::test_module_crud ... ok test registry::knowledge::tests::test_paper_roundtrip ... ok test registry::knowledge::tests::test_save_embeddings ... ok test registry::knowledge::tests::test_save_summary_smoke ... ok test registry::knowledge::tests::test_semantic_search_symbols ... ok test registry::knowledge::tests::test_symbol_read_tracking ... ok test registry::knowledge_meta::tests::test_knowledge_meta_crud ... ok test registry::known_limits::tests::test_known_limit_crud ... ok test registry::known_limits::tests::test_list_known_limits_by_category ... ok test registry::links::tests::test_get_linked_repos ... ok test registry::links::tests::test_get_linked_repos_empty ... ok test registry::links::tests::test_get_linked_repos_full ... ok test registry::links::tests::test_get_linked_vault_notes ... ok test registry::links::tests::test_get_linked_vaults ... ok test registry::migrate::tests::test_db_path_format ... ok test registry::migrate::tests::test_workspace_dir_format ... ok test registry::repo::tests::test_list_repos_empty ... ok test registry::repo::tests::test_list_repos_need_index ... ok test registry::repo::tests::test_list_repos_stale_health ... ok test registry::repo::tests::test_list_workspaces_by_tier ... ok test registry::repo::tests::test_save_and_list_repo ... ok test registry::repo::tests::test_save_repo_updates_existing ... ok test registry::repo::tests::test_save_repo_with_remotes ... ok test registry::repo::tests::test_save_repo_with_stars ... ok test registry::repo::tests::test_save_repo_with_tags ... ok test registry::repo::tests::test_update_repo_language ... ok test registry::repo::tests::test_update_repo_last_synced_at ... ok test registry::repo::tests::test_update_repo_tier ... ok test registry::repo::tests::test_update_repo_workspace_type ... ok test registry::repos_toml::tests::test_apply_overrides ... ok test registry::repos_toml::tests::test_parse_repos_toml ... ok test registry::test_helpers::tests::test_in_memory_schema_version ... ok test registry::test_helpers::tests::test_knowledge_meta_table_exists ... ok test registry::test_helpers::tests::test_known_limits_table_exists ... ok test registry::test_helpers::tests::test_workflow_executions_table_exists ... ok test registry::tests::test_dead_code_excludes_pub_variants_and_main ... ok test registry::tests::test_dead_code_include_pub ... ok test registry::tests::test_oplog_event_type_roundtrip ... ok test registry::tests::test_oplog_migration_compat ... ok test registry::tests::test_oplog_save_and_list ... ok test registry::tests::test_primary_remote_fallback_to_first ... ok test registry::tests::test_primary_remote_none ... ok test registry::tests::test_primary_remote_prefers_origin ... ok test registry::tests::test_stars_cache_miss ... ok test registry::tests::test_stars_cache_roundtrip ... ok test registry::tests::test_stars_cache_update ... ok test registry::vault::tests::test_delete_vault_note ... ok test registry::vault::tests::test_list_vault_notes_empty ... ok test registry::vault::tests::test_save_and_list_vault_note ... ok test scan::tests::test_detect_language_cpp ... ok test scan::tests::test_detect_language_go ... ok test scan::tests::test_detect_language_node ... ok test scan::tests::test_detect_language_none ... ok test scan::tests::test_detect_language_python_pyproject ... ok test scan::tests::test_detect_language_python_requirements ... ok test scan::tests::test_detect_language_rust ... ok test scan::tests::test_discover_repos_devbase_ignore ... ok test scan::tests::test_discover_repos_excludes_paths ... ok test scan::tests::test_discover_repos_excludes_patterns ... ok test scan::tests::test_discover_repos_finds_non_git_workspaces ... ok test scan::tests::test_inspect_non_git_workspace_generic ... ok test scan::tests::test_inspect_non_git_workspace_openclaw ... ok test scan::tests::test_is_excluded_path_sync_context ... ok test scan::tests::test_is_nested_submodule_false ... ok test scan::tests::test_is_nested_submodule_true ... ok test scan::tests::test_normal_tags ... ok test scan::tests::test_parse_github_owner_repo_https ... ok test scan::tests::test_parse_github_owner_repo_invalid ... ok test scan::tests::test_parse_github_owner_repo_non_github ... ok test scan::tests::test_parse_github_owner_repo_ssh ... ok test scan::tests::test_zip_snapshot_tags_main ... ok test scan::tests::test_zip_snapshot_tags_master ... ok test search::hybrid::tests::test_hybrid_search_fallback_to_keyword ... ok test search::hybrid::tests::test_keyword_search_basic ... ok test search::hybrid::tests::test_rrf_merge_empty_lists ... ok test search::hybrid::tests::test_rrf_merge_single_list_passthrough ... ok test search::hybrid::tests::test_rrf_merge_two_lists ... ok test search::symbol_index::tests::test_add_and_search_symbol ... ok test search::symbol_index::tests::test_delete_repo_symbols ... ok test search::symbol_index::tests::test_search_signature_match ... ok test search::tests::test_add_and_search_repo ... ok test search::tests::test_add_vault_doc ... ok test search::tests::test_build_schema ... ok test search::tests::test_delete_repo_doc ... ok test search::tests::test_index_is_empty ... ok test search::tests::test_list_indexed_repo_ids ... ok test search::tests::test_search_repos ... ok test search::tests::test_search_vault ... ok test search::tests::test_sync_index_to_db_removes_orphans ... ok test semantic_index::git_diff::tests::test_current_head_hash_after_commit ... ok test semantic_index::git_diff::tests::test_current_head_hash_empty_repo ... ok test semantic_index::git_diff::tests::test_diff_since_deleted_file ... ok test semantic_index::git_diff::tests::test_diff_since_no_changes ... ok test semantic_index::git_diff::tests::test_diff_since_none_first_index ... ok test semantic_index::git_diff::tests::test_diff_since_with_last_hash ... ok test semantic_index::git_diff::tests::test_diff_since_workdir_modification ... ok test semantic_index::git_diff::tests::test_diff_since_workdir_untracked ... ok test semantic_index::symbol::tests::test_extract_rust_attributes ... ok test semantic_index::tests::test_extract_go_const ... ok test semantic_index::tests::test_extract_go_function ... ok test semantic_index::tests::test_extract_go_method ... ok test semantic_index::tests::test_extract_go_struct_and_interface ... ok test semantic_index::tests::test_extract_js_function ... ok test semantic_index::tests::test_extract_multiple_rust ... ok test semantic_index::tests::test_extract_python_class ... ok test semantic_index::tests::test_extract_python_function ... ok test semantic_index::tests::test_extract_python_multiple ... ok test semantic_index::tests::test_extract_rust_function ... ok test semantic_index::tests::test_extract_rust_struct ... ok test semantic_index::tests::test_extract_ts_class_and_interface ... ok test semantic_index::tests::test_extract_ts_enum ... ok test semantic_index::tests::test_index_repo_full ... ok test semantic_index::tests::test_save_calls ... ok test semantic_index::tests::test_save_symbols ... ok test skill_runtime::clarity_sync::tests::test_conflict_resolution_skips_older_devbase_skill ... ok test skill_runtime::clarity_sync::tests::test_conflict_resolution_updates_when_devbase_newer ... ok test skill_runtime::clarity_sync::tests::test_sync_skills_to_clarity ... ok test skill_runtime::dependency::tests::test_detect_cycle_direct ... ok test skill_runtime::dependency::tests::test_detect_cycle_none ... ok test skill_runtime::dependency::tests::test_resolve_cycle_fails ... ok test skill_runtime::dependency::tests::test_resolve_topological_order ... ok test skill_runtime::dependency::tests::test_validate_dependencies_all_satisfied ... ok test skill_runtime::dependency::tests::test_validate_dependencies_missing ... ok test skill_runtime::discover::tests::test_generate_entry_script_node ... ok test skill_runtime::discover::tests::test_generate_entry_script_python ... ok test skill_runtime::discover::tests::test_generate_entry_script_rust ... ok test skill_runtime::discover::tests::test_generate_skill_md_structure ... ok test skill_runtime::executor::tests::test_hard_veto_guard_empty_registry ... ok test skill_runtime::executor::tests::test_hard_veto_guard_with_unresolved_vetoes ... ok test skill_runtime::executor::tests::test_resolve_interpreter_binary ... ok test skill_runtime::executor::tests::test_resolve_interpreter_powershell ... ok test skill_runtime::executor::tests::test_resolve_interpreter_python ... ok test skill_runtime::executor::tests::test_resolve_interpreter_shell ... ok test skill_runtime::executor::tests::test_run_skill_not_found ... ok test skill_runtime::executor::tests::test_run_skill_success ... ok test skill_runtime::publish::tests::test_get_default_remote_fallback ... ok test skill_runtime::publish::tests::test_get_default_remote_origin ... ok test skill_runtime::publish::tests::test_push_tag_no_remote ... ok test skill_runtime::publish::tests::test_push_tag_success_to_bare_remote ... ok test skill_runtime::registry::tests::test_execution_tracking ... ok test skill_runtime::registry::tests::test_install_and_get_skill ... ok test skill_runtime::registry::tests::test_list_skills_by_type ... ok test skill_runtime::registry::tests::test_search_skills_text ... ok test skill_runtime::registry::tests::test_uninstall_skill ... ok test skill_runtime::scoring::tests::test_calculate_scores ... ok test skill_runtime::scoring::tests::test_recalculate_all ... ok test skill_runtime::scoring::tests::test_recommend_skills ... ok test skill_sync::tests::test_convert_to_skill ... ok test skill_sync::tests::test_extract_description_long ... ok test skill_sync::tests::test_extract_description_short ... ok test skill_sync::tests::test_extract_description_skips_heading ... ok test storage::tests::test_app_context_with_temp_storage ... ok test storage::tests::test_repair_tantivy_consistency_detects_orphan ... ok test sync::orchestrator::tests::test_sync_orchestrator_new ... ok test sync::policy::tests::test_classify_sync_error ... ok test sync::policy::tests::test_recommend_sync_action ... ok test sync::policy::tests::test_sync_policy_capabilities ... ok test sync::policy::tests::test_sync_policy_from_tags ... ok test sync::tasks::tests::test_map_action_known ... ok test sync::tasks::tests::test_map_action_unknown ... ok test sync::tasks::tests::test_write_syncdone_marker ... ok test sync::tests::test_assess_safety_blocked_dirty ... ok test sync::tests::test_assess_safety_blocked_diverged_conservative ... ok test sync::tests::test_assess_safety_diverged_rebase_allowed ... ok test sync::tests::test_assess_safety_no_upstream ... ok test sync::tests::test_assess_safety_safe_ff ... ok test sync::tests::test_assess_safety_up_to_date ... ok test sync::tests::test_collect_tasks_default_mode_excludes_untagged ... ok test sync::tests::test_collect_tasks_default_mode_includes_known_tags ... ok test sync::tests::test_collect_tasks_explicit_filter_includes_untagged ... ok test sync::tests::test_map_action ... ok test sync::tests::test_perform_merge_fast_forward ... ok test sync::tests::test_perform_merge_up_to_date ... ok test sync::tests::test_sync_repo_skip_no_syncdone ... ok test sync::tests::test_write_syncdone_marker ... ok test tui::event::tests::test_tui_action_variants ... ok test tui::layout::tests::test_layout_centered ... ok test tui::layout::tests::test_layout_compact ... ok test tui::layout::tests::test_layout_inner ... ok test tui::layout::tests::test_layout_standard ... ok test tui::layout::tests::test_layout_wide ... ok test tui::render::detail::tests::test_overview_status_desc ... ok test tui::render::detail::tests::test_overview_status_icon ... ok test tui::render::detail::tests::test_sync_policy_color ... ok test tui::render::help::tests::test_help_section_empty ... ok test tui::render::help::tests::test_help_section_with_bindings ... ok test tui::render::list::tests::test_repo_status_fg ... ok test tui::render::list::tests::test_repo_status_icon ... ok test tui::render::logs::tests::test_format_log_line_plain ... ok test tui::render::logs::tests::test_format_log_line_with_timestamp ... ok test tui::render::popups::tests::test_search_results_title_empty ... ok test tui::render::popups::tests::test_search_results_title_with_results ... ok test tui::render::tests::test_read_repo_summary_found ... ok test tui::render::tests::test_read_repo_summary_missing ... ok test tui::render::tests::test_read_syncdone_info_missing ... ok test tui::render::tests::test_read_syncdone_info_valid ... ok test tui::state::tests::test_run_nlp_selected_skill_empty_results ... ok test tui::tests::test_detail_tab_label ... ok test tui::tests::test_main_view_toggle ... ok test tui::tests::test_search_mode_label ... ok test tui::tests::test_sort_mode_toggle ... ok test tui::theme::tests::test_styles_from_dark_theme ... ok test tui::theme::tests::test_theme_dark_colors ... ok test tui::theme::tests::test_theme_default_is_dark ... ok test tui::theme::tests::test_theme_light_colors ... ok test vault::backlinks::tests::test_backlink_index_basic ... ok test vault::fs_io::tests::test_read_note_body_with_frontmatter ... ok test vault::fs_io::tests::test_read_note_body_without_frontmatter ... ok test vault::fs_io::tests::test_read_note_content ... ok test vault::fs_io::tests::test_read_note_content_missing ... ok test vault::indexer::tests::test_index_vault_note_core_add ... ok test vault::indexer::tests::test_index_vault_note_core_update ... ok test vault::indexer::tests::test_reindex_vault_core_empty ... ok test vault::indexer::tests::test_reindex_vault_core_with_notes ... ok test vault::scanner::tests::test_scan_vault_basic ... ok test vault::scanner::tests::test_scan_vault_empty_dir ... ok test vault::scanner::tests::test_scan_vault_missing_dir ... ok test watch::tests::test_folder_scheduler_degrades_to_scan ... ok test watch::tests::test_folder_scheduler_first_run ... ok test watch::tests::test_folder_scheduler_incremental_no_change ... ok test watch::tests::test_watch_aggregator_dedup ... ok test workflow::executor::tests::test_condition_step_false ... ok test workflow::executor::tests::test_condition_step_true ... ok test workflow::executor::tests::test_loop_body_continue ... ok test workflow::executor::tests::test_loop_body_fallback ... ok test workflow::executor::tests::test_loop_empty_collection ... ok test workflow::executor::tests::test_loop_failure ... ok test workflow::executor::tests::test_loop_multi_iteration ... ok test workflow::executor::tests::test_loop_single_iteration ... ok test workflow::executor::tests::test_parallel_step ... ok test workflow::executor::tests::test_subworkflow_step ... ok test workflow::parser::tests::test_parse_basic_workflow ... ok test workflow::parser::tests::test_parse_invalid_id ... ok test workflow::scheduler::tests::test_linear_schedule ... ok test workflow::scheduler::tests::test_parallel_schedule ... ok test workflow::scheduler::tests::test_transitive_deps ... ok test workflow::scheduler::tests::test_transitive_deps_leaf ... ok test workflow::state::tests::test_create_and_update_execution ... ok test workflow::state::tests::test_end_to_end_workflow_lifecycle ... ok test workflow::state::tests::test_save_and_get_workflow ... ok test workflow::validator::tests::test_cycle_detected ... ok test workflow::validator::tests::test_loop_body_duplicate_global_id ... ok test workflow::validator::tests::test_loop_body_missing_dep ... ok test workflow::validator::tests::test_loop_body_valid ... ok test workflow::validator::tests::test_missing_dep ... ok test workflow::validator::tests::test_valid_dag ... ok test result: ok. 422 passed; 0 failed; 3 ignored; 0 measured; 0 filtered out; finished in 10.73s running 7 tests test commands::limit::tests::test_run_limit_delete_not_found ... ok test commands::limit::tests::test_run_limit_seed_and_list_json ... ok test commands::simple::tests::test_run_vault_list_empty ... ok test commands::skill::tests::test_run_skill_list_empty ... ok test commands::skill::tests::test_run_skill_uninstall_not_found ... ok test commands::workflow::tests::test_run_workflow_delete_not_found ... ok test commands::workflow::tests::test_run_workflow_list_empty ... ok test result: ok. 7 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.18s running 11 tests test test_backup_export ... ok test test_health_empty_registry ... ok test test_limit_add_and_list ... ok test test_limit_list_empty ... ok test test_registry_backups_empty ... ok test test_scan_git_repo ... ok test test_skill_discover ... ok test test_skill_list_empty ... ok test test_sync_skips_unmanaged_repo ... ok test test_tag_enables_sync ... ok test test_version ... ok test result: ok. 11 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 6.48s Testing save_repo Success Testing list_repos_500 Success Testing get_health Success Testing format_mcp_message Success Testing index_repo_full/scale/small Success Testing index_repo_full/scale/medium Success Testing index_repo_full/scale/full Success Testing cosine_similarity/dim/128 Success Testing cosine_similarity/dim/512 Success Testing cosine_similarity/dim/768 Success Testing extract_symbols/lang/rust Success Testing extract_symbols/lang/python Success Testing extract_symbols/lang/go Success Testing parse_cmake_lists/complex Success running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s 440 passed / 0 failed - 零警告 - 通过 --- src/search.rs | 4 +- src/vault/indexer.rs | 175 +++++++++++++++++++++++++++++++++++++++---- 2 files changed, 162 insertions(+), 17 deletions(-) diff --git a/src/search.rs b/src/search.rs index 3468434..0c10485 100644 --- a/src/search.rs +++ b/src/search.rs @@ -11,7 +11,7 @@ use tantivy::{ Index, IndexReader, IndexWriter, ReloadPolicy, TantivyDocument, TantivyError, collector::TopDocs, query::{BooleanQuery, Occur, QueryParser, TermQuery}, - schema::{STORED, Schema, TEXT, Value}, + schema::{STORED, STRING, Schema, TEXT, Value}, }; const INDEX_DIR: &str = "devbase/search_index"; @@ -24,7 +24,7 @@ fn index_path() -> Result { fn build_schema() -> Schema { let mut schema_builder = Schema::builder(); - schema_builder.add_text_field("id", TEXT | STORED); + schema_builder.add_text_field("id", STRING | STORED); schema_builder.add_text_field("title", TEXT | STORED); schema_builder.add_text_field("content", TEXT); schema_builder.add_text_field("tags", TEXT); diff --git a/src/vault/indexer.rs b/src/vault/indexer.rs index cb87690..cc747bb 100644 --- a/src/vault/indexer.rs +++ b/src/vault/indexer.rs @@ -2,6 +2,8 @@ // Copyright (c) 2026 juice094 use crate::search; use crate::vault::fs_io; +use tantivy::IndexWriter; +use tantivy::schema::Schema; use tracing::info; /// Index all vault notes from the registry into Tantivy. @@ -17,23 +19,28 @@ pub fn reindex_vault(conn: &rusqlite::Connection) -> anyhow::Result<()> { let (index, _reader) = search::init_index()?; let mut writer = search::get_writer(&index)?; let schema = index.schema(); + reindex_vault_core(¬es, &mut writer, &schema) +} +fn reindex_vault_core( + notes: &[crate::registry::VaultNote], + writer: &mut IndexWriter, + schema: &Schema, +) -> anyhow::Result<()> { // Delete all existing vault docs let doc_type = schema.get_field("doc_type")?; let term = tantivy::Term::from_field_text(doc_type, "vault"); writer.delete_term(term); let mut indexed = 0; - for note in ¬es { + for note in notes { let title = note.title.as_deref().unwrap_or(¬e.id); let tags: Vec = note.tags.clone(); // P1-1: read content from filesystem; fallback to empty string if unreadable let content = fs_io::read_note_body(¬e.path).map(|(body, _fm)| body).unwrap_or_default(); - if let Err(e) = - search::add_vault_doc(&mut writer, &schema, ¬e.id, title, &content, &tags) - { + if let Err(e) = search::add_vault_doc(writer, schema, ¬e.id, title, &content, &tags) { tracing::warn!("Failed to index vault note {}: {}", note.id, e); } else { indexed += 1; @@ -50,13 +57,20 @@ pub fn index_vault_note(note: &crate::registry::VaultNote) -> anyhow::Result<()> let (index, _reader) = search::init_index()?; let mut writer = search::get_writer(&index)?; let schema = index.schema(); + index_vault_note_core(note, &mut writer, &schema) +} +fn index_vault_note_core( + note: &crate::registry::VaultNote, + writer: &mut IndexWriter, + schema: &Schema, +) -> anyhow::Result<()> { // Delete old doc by id let id_field = schema.get_field("id")?; writer.delete_term(tantivy::Term::from_field_text(id_field, ¬e.id)); let title = note.title.as_deref().unwrap_or(¬e.id); - search::add_vault_doc(&mut writer, &schema, ¬e.id, title, ¬e.content, ¬e.tags)?; + search::add_vault_doc(writer, schema, ¬e.id, title, ¬e.content, ¬e.tags)?; writer.commit()?; Ok(()) } @@ -64,23 +78,154 @@ pub fn index_vault_note(note: &crate::registry::VaultNote) -> anyhow::Result<()> #[cfg(test)] mod tests { use super::*; + use crate::registry::VaultNote; + use std::io::Write; + + fn init_isolated_index() + -> (tempfile::TempDir, tantivy::Index, tantivy::IndexWriter, tantivy::schema::Schema) { + let tmp = tempfile::tempdir().unwrap(); + let (index, _reader) = search::init_index_at(tmp.path()).unwrap(); + let writer = search::get_writer(&index).unwrap(); + let schema = index.schema(); + (tmp, index, writer, schema) + } + + #[test] + fn test_reindex_vault_core_empty() { + let (_tmp, _index, mut writer, schema) = init_isolated_index(); + // Seed a dummy vault doc so we can verify deletion works + search::add_vault_doc(&mut writer, &schema, "dummy", "Dummy", "content", &[]).unwrap(); + writer.commit().unwrap(); + + let notes: Vec = vec![]; + reindex_vault_core(¬es, &mut writer, &schema).unwrap(); + + // After reindex with empty notes, vault docs should be gone + let reader = _index.reader().unwrap(); + let searcher = reader.searcher(); + let doc_type = schema.get_field("doc_type").unwrap(); + let term = tantivy::Term::from_field_text(doc_type, "vault"); + let count = searcher + .search( + &tantivy::query::TermQuery::new(term, tantivy::schema::IndexRecordOption::Basic), + &tantivy::collector::Count, + ) + .unwrap(); + assert_eq!(count, 0); + } + + #[test] + fn test_reindex_vault_core_with_notes() { + let tmp = tempfile::tempdir().unwrap(); + let md_path = tmp.path().join("note.md"); + let mut file = std::fs::File::create(&md_path).unwrap(); + writeln!(file, "# Hello\n\nThis is a test note.").unwrap(); + drop(file); + + let note = VaultNote { + id: "note-1".to_string(), + path: md_path.to_str().unwrap().to_string(), + title: Some("Hello".to_string()), + content: "ignored".to_string(), + frontmatter: None, + tags: vec!["tag1".to_string()], + outgoing_links: vec![], + linked_repo: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }; + + let (_tmp, _index, mut writer, schema) = init_isolated_index(); + reindex_vault_core(&[note], &mut writer, &schema).unwrap(); + + // Verify the note was indexed by searching + let reader = _index.reader().unwrap(); + let searcher = reader.searcher(); + let doc_type = schema.get_field("doc_type").unwrap(); + let term = tantivy::Term::from_field_text(doc_type, "vault"); + let count = searcher + .search( + &tantivy::query::TermQuery::new(term, tantivy::schema::IndexRecordOption::Basic), + &tantivy::collector::Count, + ) + .unwrap(); + assert_eq!(count, 1); + } #[test] - fn test_index_vault_note_smoke() { - let note = crate::registry::VaultNote { - id: "test-note".to_string(), - path: "/tmp/test.md".to_string(), - title: Some("Test".to_string()), - content: "Hello world".to_string(), + fn test_index_vault_note_core_add() { + let note = VaultNote { + id: "note-add".to_string(), + path: "/tmp/add.md".to_string(), + title: Some("Add".to_string()), + content: "new content".to_string(), frontmatter: None, - tags: vec!["test".to_string()], + tags: vec!["add".to_string()], outgoing_links: vec![], linked_repo: None, created_at: chrono::Utc::now(), updated_at: chrono::Utc::now(), }; - // This may fail if Tantivy index is locked by another test; - // we only verify it does not panic. - let _ = index_vault_note(¬e); + + let (_tmp, _index, mut writer, schema) = init_isolated_index(); + index_vault_note_core(¬e, &mut writer, &schema).unwrap(); + + let reader = _index.reader().unwrap(); + let searcher = reader.searcher(); + let doc_type = schema.get_field("doc_type").unwrap(); + let term = tantivy::Term::from_field_text(doc_type, "vault"); + let count = searcher + .search( + &tantivy::query::TermQuery::new(term, tantivy::schema::IndexRecordOption::Basic), + &tantivy::collector::Count, + ) + .unwrap(); + assert_eq!(count, 1); + } + + #[test] + fn test_index_vault_note_core_update() { + let note = VaultNote { + id: "note-update".to_string(), + path: "/tmp/update.md".to_string(), + title: Some("Original".to_string()), + content: "original content".to_string(), + frontmatter: None, + tags: vec![], + outgoing_links: vec![], + linked_repo: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }; + + let (_tmp, _index, mut writer, schema) = init_isolated_index(); + index_vault_note_core(¬e, &mut writer, &schema).unwrap(); + + let updated = VaultNote { + id: "note-update".to_string(), + path: "/tmp/update.md".to_string(), + title: Some("Updated".to_string()), + content: "updated content".to_string(), + frontmatter: None, + tags: vec!["new-tag".to_string()], + outgoing_links: vec![], + linked_repo: None, + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + }; + index_vault_note_core(&updated, &mut writer, &schema).unwrap(); + + // Tantivy delete + add semantics: old doc replaced, only 1 doc remains + let reader = _index.reader().unwrap(); + let searcher = reader.searcher(); + let doc_type = schema.get_field("doc_type").unwrap(); + let term = tantivy::Term::from_field_text(doc_type, "vault"); + let count = searcher + .search( + &tantivy::query::TermQuery::new(term, tantivy::schema::IndexRecordOption::Basic), + &tantivy::collector::Count, + ) + .unwrap(); + assert_eq!(count, 1); } } From 2d62826c5a995ac5eaf1335f41651d6687a2d5b2 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 12:38:43 +0800 Subject: [PATCH 10/17] =?UTF-8?q?test(semantic=5Findex):=20=E8=A1=A5?= =?UTF-8?q?=E9=BD=90=20persist.rs=20=E5=8D=95=E5=85=83=E6=B5=8B=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 覆盖 5 个数据库操作函数: - save_symbols: 批量替换旧数据 - save_symbols_incremental: 增量追加 - delete_symbols_for_files: 按文件删除 symbols + calls - save_calls: 批量替换旧 calls - save_calls_incremental: 增量追加 calls 使用 WorkspaceRegistry::init_in_memory() 提供隔离测试环境, 通过 SQL COUNT 查询验证数据库状态。 --- src/semantic_index/persist.rs | 132 ++++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) diff --git a/src/semantic_index/persist.rs b/src/semantic_index/persist.rs index 291c4d2..097d4bf 100644 --- a/src/semantic_index/persist.rs +++ b/src/semantic_index/persist.rs @@ -161,3 +161,135 @@ pub fn save_calls_incremental( tx.commit()?; Ok(inserted) } + +#[cfg(test)] +mod tests { + use super::*; + use crate::registry::test_helpers::WorkspaceRegistry; + use crate::semantic_index::SymbolType; + use std::path::PathBuf; + + fn sample_symbol(name: &str, file: &str) -> CodeSymbol { + CodeSymbol { + symbol_type: SymbolType::Function, + name: name.to_string(), + file_path: PathBuf::from(file), + line_start: 1, + line_end: 10, + signature: Some(format!("fn {}()", name)), + attributes: None, + } + } + + fn sample_call(caller_file: &str, caller_symbol: &str, callee: &str) -> CodeCall { + CodeCall { + caller_file: PathBuf::from(caller_file), + caller_symbol: caller_symbol.to_string(), + caller_line: 5, + callee_name: callee.to_string(), + } + } + + #[test] + fn test_save_symbols_replaces_old() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + let old = vec![sample_symbol("old_fn", "src/old.rs")]; + let new = vec![sample_symbol("new_fn", "src/new.rs")]; + + save_symbols(&mut conn, "repo1", &old).unwrap(); + save_symbols(&mut conn, "repo1", &new).unwrap(); + + let count: i64 = conn + .query_row("SELECT COUNT(*) FROM code_symbols WHERE repo_id = ?1", ["repo1"], |row| { + row.get(0) + }) + .unwrap(); + assert_eq!(count, 1); + } + + #[test] + fn test_save_symbols_incremental() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + let first = vec![sample_symbol("fn_a", "src/a.rs")]; + let second = vec![sample_symbol("fn_b", "src/b.rs")]; + + save_symbols_incremental(&mut conn, "repo1", &first).unwrap(); + save_symbols_incremental(&mut conn, "repo1", &second).unwrap(); + + let count: i64 = conn + .query_row("SELECT COUNT(*) FROM code_symbols WHERE repo_id = ?1", ["repo1"], |row| { + row.get(0) + }) + .unwrap(); + assert_eq!(count, 2); + } + + #[test] + fn test_delete_symbols_for_files() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + let symbols = vec![sample_symbol("fn_a", "src/a.rs"), sample_symbol("fn_b", "src/b.rs")]; + let calls = vec![ + sample_call("src/a.rs", "fn_a", "helper"), + sample_call("src/b.rs", "fn_b", "helper"), + ]; + + save_symbols(&mut conn, "repo1", &symbols).unwrap(); + save_calls(&mut conn, "repo1", &calls).unwrap(); + + delete_symbols_for_files(&mut conn, "repo1", &["src/a.rs".to_string()]).unwrap(); + + let sym_count: i64 = conn + .query_row("SELECT COUNT(*) FROM code_symbols WHERE repo_id = ?1", ["repo1"], |row| { + row.get(0) + }) + .unwrap(); + assert_eq!(sym_count, 1); + + let call_count: i64 = conn + .query_row( + "SELECT COUNT(*) FROM code_call_graph WHERE repo_id = ?1", + ["repo1"], + |row| row.get(0), + ) + .unwrap(); + assert_eq!(call_count, 1); + } + + #[test] + fn test_save_calls_replaces_old() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + let old = vec![sample_call("src/old.rs", "old_fn", "callee1")]; + let new = vec![sample_call("src/new.rs", "new_fn", "callee2")]; + + save_calls(&mut conn, "repo1", &old).unwrap(); + save_calls(&mut conn, "repo1", &new).unwrap(); + + let count: i64 = conn + .query_row( + "SELECT COUNT(*) FROM code_call_graph WHERE repo_id = ?1", + ["repo1"], + |row| row.get(0), + ) + .unwrap(); + assert_eq!(count, 1); + } + + #[test] + fn test_save_calls_incremental() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + let first = vec![sample_call("src/a.rs", "fn_a", "callee1")]; + let second = vec![sample_call("src/b.rs", "fn_b", "callee2")]; + + save_calls_incremental(&mut conn, "repo1", &first).unwrap(); + save_calls_incremental(&mut conn, "repo1", &second).unwrap(); + + let count: i64 = conn + .query_row( + "SELECT COUNT(*) FROM code_call_graph WHERE repo_id = ?1", + ["repo1"], + |row| row.get(0), + ) + .unwrap(); + assert_eq!(count, 2); + } +} From 122fcbf944e97f4a56d3c94a170c168077fb4476 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 13:05:20 +0800 Subject: [PATCH 11/17] =?UTF-8?q?test(discovery=5Fengine):=20=E8=A1=A5?= =?UTF-8?q?=E9=BD=90=20package=5Fjson=E3=80=81go=5Fmod=E3=80=81similar=5Fp?= =?UTF-8?q?rojects=20=E6=B5=8B=E8=AF=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - test_discover_dependencies_package_json: 覆盖 npm 依赖发现路径 - test_discover_dependencies_go_mod: 覆盖 Go module 依赖发现路径 - test_discover_similar_projects: 覆盖 repo_summaries 关键词 Jaccard 相似度计算 --- src/discovery_engine.rs | 129 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) diff --git a/src/discovery_engine.rs b/src/discovery_engine.rs index a148a58..ef6d1c4 100644 --- a/src/discovery_engine.rs +++ b/src/discovery_engine.rs @@ -300,4 +300,133 @@ my-lib = { path = "../my-lib" } assert!(discoveries.is_empty()); std::fs::remove_dir_all(&tmp).unwrap(); } + + #[test] + fn test_discover_dependencies_package_json() { + let tmp1 = + std::env::temp_dir().join(format!("devbase_discover_npm1_{}", std::process::id())); + let tmp2 = + std::env::temp_dir().join(format!("devbase_discover_npm2_{}", std::process::id())); + std::fs::create_dir_all(&tmp1).unwrap(); + std::fs::create_dir_all(&tmp2).unwrap(); + std::fs::write(tmp1.join("package.json"), r#"{"dependencies": {"my-lib": "^1.0.0"}}"#) + .unwrap(); + + let repo1 = RepoEntry { + id: "app".to_string(), + local_path: tmp1.clone(), + tags: vec![], + discovered_at: chrono::Utc::now(), + language: Some("node".to_string()), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + let repo2 = RepoEntry { + id: "my-lib".to_string(), + local_path: tmp2.clone(), + tags: vec![], + discovered_at: chrono::Utc::now(), + language: Some("node".to_string()), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + + let discoveries = discover_dependencies(&[repo1, repo2]); + assert!(!discoveries.is_empty()); + assert_eq!(discoveries[0].from, "app"); + assert_eq!(discoveries[0].to, "my-lib"); + assert_eq!(discoveries[0].relation_type, "depends_on"); + + std::fs::remove_dir_all(&tmp1).unwrap(); + std::fs::remove_dir_all(&tmp2).unwrap(); + } + + #[test] + fn test_discover_dependencies_go_mod() { + let tmp1 = + std::env::temp_dir().join(format!("devbase_discover_go1_{}", std::process::id())); + let tmp2 = + std::env::temp_dir().join(format!("devbase_discover_go2_{}", std::process::id())); + std::fs::create_dir_all(&tmp1).unwrap(); + std::fs::create_dir_all(&tmp2).unwrap(); + std::fs::write( + tmp1.join("go.mod"), + "module example.com/app\n\nrequire (\n\texample.com/my-lib v1.0.0\n)\n", + ) + .unwrap(); + + let repo1 = RepoEntry { + id: "example.com/app".to_string(), + local_path: tmp1.clone(), + tags: vec![], + discovered_at: chrono::Utc::now(), + language: Some("go".to_string()), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + let repo2 = RepoEntry { + id: "example.com/my-lib".to_string(), + local_path: tmp2.clone(), + tags: vec![], + discovered_at: chrono::Utc::now(), + language: Some("go".to_string()), + workspace_type: "git".to_string(), + data_tier: "private".to_string(), + last_synced_at: None, + stars: None, + remotes: vec![], + }; + + let discoveries = discover_dependencies(&[repo1, repo2]); + assert!(!discoveries.is_empty()); + assert_eq!(discoveries[0].from, "example.com/app"); + assert_eq!(discoveries[0].to, "example.com/my-lib"); + assert_eq!(discoveries[0].relation_type, "depends_on"); + + std::fs::remove_dir_all(&tmp1).unwrap(); + std::fs::remove_dir_all(&tmp2).unwrap(); + } + + #[test] + fn test_discover_similar_projects() { + let conn = crate::registry::test_helpers::WorkspaceRegistry::init_in_memory().unwrap(); + conn.execute( + "INSERT INTO repo_summaries (repo_id, keywords, generated_at) VALUES (?1, ?2, datetime('now'))", + ["repo-a", "rust,cli,tool"], + ) + .unwrap(); + conn.execute( + "INSERT INTO repo_summaries (repo_id, keywords, generated_at) VALUES (?1, ?2, datetime('now'))", + ["repo-b", "rust,web,server"], + ) + .unwrap(); + conn.execute( + "INSERT INTO repo_summaries (repo_id, keywords, generated_at) VALUES (?1, ?2, datetime('now'))", + ["repo-c", "python,ml,ai"], + ) + .unwrap(); + + let discoveries = discover_similar_projects(&conn).unwrap(); + assert!(!discoveries.is_empty()); + + // repo-a and repo-b share "rust" + let ab = discoveries.iter().find(|d| { + (d.from == "repo-a" && d.to == "repo-b") || (d.from == "repo-b" && d.to == "repo-a") + }); + assert!(ab.is_some(), "expected repo-a/repo-b similarity"); + assert_eq!(ab.unwrap().relation_type, "similar_to"); + + // repo-c has no overlap with others + let c_involved = discoveries.iter().any(|d| d.from == "repo-c" || d.to == "repo-c"); + assert!(!c_involved, "repo-c should have no similar projects"); + } } From 55ed67e1a966d408e75c781a0a4cabbab38d9656 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 16:48:07 +0800 Subject: [PATCH 12/17] chore(deps): patch upgrade assert_cmd/blake3/tantivy/tokio MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - assert_cmd 2.2.1 → 2.2.2 - blake3 1.8.4 → 1.8.5 - tantivy 0.26.0 → 0.26.1 - tokio 1.52.1 → 1.52.3 验证: cargo test --all-targets 全绿 (448 passed) --- Cargo.lock | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8f32452..df489c0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -88,7 +88,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -99,7 +99,7 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -140,9 +140,9 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "assert_cmd" -version = "2.2.1" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39bae1d3fa576f7c6519514180a72559268dd7d1fe104070956cb687bc6673bd" +checksum = "2aa3a22042e45de04255c7bf3626e239f450200fd0493c1e382263544b20aea6" dependencies = [ "anstyle", "bstr", @@ -256,9 +256,9 @@ dependencies = [ [[package]] name = "blake3" -version = "1.8.4" +version = "1.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d2d5991425dfd0785aed03aedcf0b321d61975c9b5b3689c774a2610ae0b51e" +checksum = "0aa83c34e62843d924f905e0f5c866eb1dd6545fc4d719e803d9ba6030371fce" dependencies = [ "arrayref", "arrayvec", @@ -293,7 +293,7 @@ version = "3.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "519bd3116aeeb42d5372c29d982d16d0170d3d4a5ed85fc7dd91642ffff3c67c" dependencies = [ - "darling 0.23.0", + "darling 0.20.11", "ident_case", "prettyplease", "proc-macro2", @@ -593,7 +593,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] @@ -1319,7 +1319,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -1465,7 +1465,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -2491,7 +2491,7 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -2996,7 +2996,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -3909,7 +3909,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3922,7 +3922,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.12.1", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -4245,7 +4245,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -4412,9 +4412,9 @@ dependencies = [ [[package]] name = "tantivy" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "778da245841522199d512d19511b041425d8cff3a8f262b4e1516fceb050289a" +checksum = "edde6a10743fff00a4e1a8c9ef020bf5f3cbad301b7d2d39f2b07f123c4eac07" dependencies = [ "aho-corasick", "arc-swap", @@ -4568,7 +4568,7 @@ dependencies = [ "getrandom 0.4.2", "once_cell", "rustix 1.1.4", - "windows-sys 0.61.2", + "windows-sys 0.52.0", ] [[package]] @@ -4610,7 +4610,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "230a1b821ccbd75b185820a1f1ff7b14d21da1e442e22c0863ea5f08771a8874" dependencies = [ "rustix 1.1.4", - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -4852,9 +4852,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.52.1" +version = "1.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b67dee974fe86fd92cc45b7a95fdd2f99a36a6d7b0d431a231178d3d670bbcc6" +checksum = "8fc7f01b389ac15039e4dc9531aa973a135d7a4135281b12d7c1bc79fd57fffe" dependencies = [ "bytes", "libc", @@ -5677,7 +5677,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.48.0", ] [[package]] From e8d0fe4056f9168d8043456c120d6d4aeb38744b Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 16:53:54 +0800 Subject: [PATCH 13/17] =?UTF-8?q?docs:=20=E5=AF=B9=E9=BD=90=20README=20Too?= =?UTF-8?q?l=20=E7=9F=A9=E9=98=B5=E4=B8=8E=E5=AE=9E=E9=99=85=E4=BB=A3?= =?UTF-8?q?=E7=A0=81=EF=BC=8848=20tools=EF=BC=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 删除不存在的 devkit_skill_top - 新增 5 个遗漏工具:devkit_oplog_query、devkit_skill_discover、 devkit_vault_daily、devkit_vault_graph、devkit_workflow_status --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e7c9379..6fce245 100644 --- a/README.md +++ b/README.md @@ -258,10 +258,14 @@ TUI `[:]` 触发 embedding 语义搜索,失败自动降级为文本搜索。AI | `devkit_skill_run` | 执行 Skill | "运行 embed-repo skill" | | `devkit_workflow_list` | 列出工作流 | "有哪些工作流?" | | `devkit_workflow_run` | 执行工作流 | "运行 deploy-staging" | -| `devkit_skill_top` | Top 评分 Skills | "评分最高的 skill?" | +| `devkit_workflow_status` | 查询工作流执行状态 | "检查工作流是否完成" | | `devkit_relation_store` | 存储实体关系 | "记录 devbase 依赖 clarity" | | `devkit_relation_query` | 查询实体关系 | "谁依赖了 devbase?" | | `devkit_relation_delete` | 删除实体关系 | "移除已弃用的关系" | +| `devkit_oplog_query` | 查询操作日志 | "最近 devbase 做了什么?" | +| `devkit_skill_discover` | 自动发现 Skill | "把这个项目打包成 Skill" | +| `devkit_vault_daily` | 生成每日笔记 | "创建今日日报" | +| `devkit_vault_graph` | 导出知识图谱 | "可视化笔记关联" | | `devkit_known_limit_store` | 记录 known limit | "记录系统约束" | | `devkit_known_limit_list` | 列出 known limits | "查看当前风险" | From 5b008d492955e9f40c9e86d459e6b884a319ae9e Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 17:30:15 +0800 Subject: [PATCH 14/17] =?UTF-8?q?feat(mcp):=20add=20devkit=5Fevaluate=20?= =?UTF-8?q?=E2=80=94=20AI=20self-evaluation=20tool=20(Claude=20Computer=20?= =?UTF-8?q?Use=20inspired)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## 设计 借鉴 Claude Computer Use 的闭环思想:AI 执行代码修改后, 通过 devkit_evaluate 自动验证正确性,形成 Act → Evaluate → Decide 循环。 ## 功能 - check_only (默认, ~10-30s): cargo check + clippy + fmt - lib: 增加 cargo test --lib --no-run 编译验证 - full: 增加 cargo test --all-targets --no-run 全目标验证 返回结构化 JSON 报告,含每项检查的 success/failure、耗时、输出预览。 ## 变更 - 新增 evaluate.rs: 实现 DevkitEvaluateTool - mcp/tools/mod.rs: 注册模块 - mcp/mod.rs: 注册枚举 + tier(Beta) + 实例化 - mcp/tests.rs: 工具数量 48→49 - README.md / AGENTS.md: 同步计数 --- AGENTS.md | 2 +- src/mcp/mod.rs | 6 ++ src/mcp/tests.rs | 3 +- src/mcp/tools/evaluate.rs | 204 ++++++++++++++++++++++++++++++++++++++ src/mcp/tools/mod.rs | 2 + 5 files changed, 215 insertions(+), 2 deletions(-) create mode 100644 src/mcp/tools/evaluate.rs diff --git a/AGENTS.md b/AGENTS.md index 0c11191..e01dc12 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -22,7 +22,7 @@ Skill Runtime 全生命周期已落地(含依赖管理 Schema v15),Schema - **Workspace**:`%LOCALAPPDATA%\devbase\workspace/` —— 文件系统 = source of truth - `vault/` —— PARA 结构:00-Inbox, 01-Projects, 02-Areas, 03-Resources, 04-Archives, 99-Meta - `assets/` —— 二进制资源 -- **MCP Server**:stdio only,**48 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` +- **MCP Server**:stdio only,**49 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` - **Kimi CLI 集成**:MCP server 已通过 `kimi mcp add` 注册,端到端验证通过(`kimi --print` 成功调用 `devkit_health`);项目级 skill 位于 `.kimi/skills/devbase-project/SKILL.md` - **统一节点模型**:`core::node::{Node, NodeType, Edge}` —— GitRepo / VaultNote / Asset / ExternalLink - **当前测试**:490+ workspace passed / 0 failed / 4 ignored(主 crate 390 + symbol-links 4 + sync-protocol 12 + core-types 3 + syncthing-client 2 + vault-frontmatter 5 + vault-wikilink 5 + workflow-interpolate 9 + workflow-model 2 + registry-health 3 + registry-metrics 4 + registry-workspace 5 + embedding 5 + skill-runtime-types 7 + skill-runtime-parser 3 + 其他 crates ~30);11/11 passed(integration `tests/cli.rs`) diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs index b01a265..dcc6785 100644 --- a/src/mcp/mod.rs +++ b/src/mcp/mod.rs @@ -104,6 +104,7 @@ pub enum McpToolEnum { WorkflowRun(DevkitWorkflowRunTool), WorkflowStatus(DevkitWorkflowStatusTool), OplogQuery(DevkitOplogQueryTool), + Evaluate(DevkitEvaluateTool), } /// Stability tier for MCP tools. @@ -180,6 +181,7 @@ impl McpToolEnum { McpToolEnum::WorkflowRun(_) => ToolTier::Beta, McpToolEnum::WorkflowStatus(_) => ToolTier::Beta, McpToolEnum::OplogQuery(_) => ToolTier::Beta, + McpToolEnum::Evaluate(_) => ToolTier::Beta, } } } @@ -235,6 +237,7 @@ impl McpTool for McpToolEnum { McpToolEnum::WorkflowRun(t) => t.name(), McpToolEnum::WorkflowStatus(t) => t.name(), McpToolEnum::OplogQuery(t) => t.name(), + McpToolEnum::Evaluate(t) => t.name(), } } @@ -288,6 +291,7 @@ impl McpTool for McpToolEnum { McpToolEnum::WorkflowRun(t) => t.schema(), McpToolEnum::WorkflowStatus(t) => t.schema(), McpToolEnum::OplogQuery(t) => t.schema(), + McpToolEnum::Evaluate(t) => t.schema(), } } @@ -345,6 +349,7 @@ impl McpTool for McpToolEnum { McpToolEnum::WorkflowRun(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowStatus(t) => t.invoke(args, ctx).await, McpToolEnum::OplogQuery(t) => t.invoke(args, ctx).await, + McpToolEnum::Evaluate(t) => t.invoke(args, ctx).await, } } } @@ -592,6 +597,7 @@ pub fn build_server_with_tiers(tiers: Option<&HashSet>) -> McpServer { McpToolEnum::WorkflowRun(DevkitWorkflowRunTool), McpToolEnum::WorkflowStatus(DevkitWorkflowStatusTool), McpToolEnum::OplogQuery(DevkitOplogQueryTool), + McpToolEnum::Evaluate(DevkitEvaluateTool), ]; for tool in all_tools { if let Some(allowed) = tiers diff --git a/src/mcp/tests.rs b/src/mcp/tests.rs index 2f30c1d..2d199ba 100644 --- a/src/mcp/tests.rs +++ b/src/mcp/tests.rs @@ -39,8 +39,9 @@ async fn test_tools_list() { let (mut ctx, _tmp) = test_ctx(); let resp = server.handle_request(req, &mut ctx).await.unwrap(); let tools = resp.get("result").unwrap().get("tools").unwrap().as_array().unwrap(); - assert_eq!(tools.len(), 48); + assert_eq!(tools.len(), 49); let names: Vec<&str> = tools.iter().map(|t| t.get("name").unwrap().as_str().unwrap()).collect(); + assert!(names.contains(&"devkit_evaluate")); assert!(names.contains(&"devkit_scan")); assert!(names.contains(&"devkit_health")); assert!(names.contains(&"devkit_sync")); diff --git a/src/mcp/tools/evaluate.rs b/src/mcp/tools/evaluate.rs new file mode 100644 index 0000000..667e599 --- /dev/null +++ b/src/mcp/tools/evaluate.rs @@ -0,0 +1,204 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2026 juice094 +//! devkit_evaluate: AI self-evaluation tool inspired by Claude Computer Use. +//! +//! After the AI makes code changes, it can call this tool to automatically +//! verify correctness via cargo check / clippy / fmt / test (no-run). +//! This closes the loop: AI acts → AI evaluates → AI decides next step. + +use super::super::McpTool; +use crate::storage::AppContext; +use serde_json::json; +use std::process::Command; +use std::time::Instant; + +#[derive(Clone)] +pub struct DevkitEvaluateTool; + +impl McpTool for DevkitEvaluateTool { + fn name(&self) -> &'static str { + "devkit_evaluate" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Run automated quality checks and return a structured report. + +Use this when the AI (or user) wants to: +- Verify that recent changes compile without errors +- Check for clippy warnings or formatting issues +- Get a quick quality assessment before committing or merging + +Scopes: +- "check_only" (default, fastest): cargo check + clippy + fmt — ~10-30s +- "lib": cargo test --lib --no-run + clippy + fmt — verifies test compilation +- "full": cargo test --all-targets --no-run + clippy + fmt — verifies all targets + +Returns a structured JSON report with success/failure per check and captured output snippets."#, + "inputSchema": { + "type": "object", + "properties": { + "scope": { + "type": "string", + "enum": ["check_only", "lib", "full"], + "description": "Evaluation scope. Default: check_only" + } + } + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + _ctx: &mut AppContext, + ) -> anyhow::Result { + let scope = args.get("scope").and_then(|v| v.as_str()).unwrap_or("check_only"); + + let start = Instant::now(); + + let check = run_cargo_check()?; + let clippy = run_cargo_clippy()?; + let fmt = run_cargo_fmt_check()?; + let test_compile = if scope == "lib" { + Some(run_cargo_test_lib_no_run()?) + } else if scope == "full" { + Some(run_cargo_test_all_no_run()?) + } else { + None + }; + + let overall_success = check.success + && clippy.success + && fmt.success + && test_compile.as_ref().is_none_or(|t| t.success); + + let total_ms = start.elapsed().as_millis() as i64; + + Ok(json!({ + "success": overall_success, + "scope": scope, + "check": check.into_json(), + "clippy": clippy.into_json(), + "fmt": fmt.into_json(), + "test_compile": test_compile.map(|t| t.into_json()), + "total_duration_ms": total_ms, + })) + } +} + +struct CheckResult { + success: bool, + duration_ms: i64, + output: String, +} + +impl CheckResult { + fn into_json(self) -> serde_json::Value { + json!({ + "success": self.success, + "duration_ms": self.duration_ms, + "output_preview": truncate_output(&self.output, 2000), + }) + } +} + +fn run_cargo_check() -> anyhow::Result { + let start = Instant::now(); + let (success, output) = run_command(Command::new("cargo").args(["check", "--all-targets"]))?; + Ok(CheckResult { + success, + duration_ms: start.elapsed().as_millis() as i64, + output, + }) +} + +fn run_cargo_clippy() -> anyhow::Result { + let start = Instant::now(); + let (success, output) = run_command(Command::new("cargo").args([ + "clippy", + "--all-targets", + "--", + "-D", + "warnings", + ]))?; + Ok(CheckResult { + success, + duration_ms: start.elapsed().as_millis() as i64, + output, + }) +} + +fn run_cargo_fmt_check() -> anyhow::Result { + let start = Instant::now(); + let (success, output) = run_command(Command::new("cargo").args(["fmt", "--check"]))?; + Ok(CheckResult { + success, + duration_ms: start.elapsed().as_millis() as i64, + output, + }) +} + +fn run_cargo_test_lib_no_run() -> anyhow::Result { + let start = Instant::now(); + let (success, output) = run_command(Command::new("cargo").args(["test", "--lib", "--no-run"]))?; + Ok(CheckResult { + success, + duration_ms: start.elapsed().as_millis() as i64, + output, + }) +} + +fn run_cargo_test_all_no_run() -> anyhow::Result { + let start = Instant::now(); + let (success, output) = + run_command(Command::new("cargo").args(["test", "--all-targets", "--no-run"]))?; + Ok(CheckResult { + success, + duration_ms: start.elapsed().as_millis() as i64, + output, + }) +} + +fn run_command(cmd: &mut Command) -> anyhow::Result<(bool, String)> { + let output = cmd.output()?; + let success = output.status.success(); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{stdout}\n{stderr}") + }; + Ok((success, combined)) +} + +fn truncate_output(s: &str, max_len: usize) -> String { + if s.len() <= max_len { + s.to_string() + } else { + format!("{}\n...[truncated {} chars]", &s[..max_len], s.len() - max_len) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_truncate_output() { + assert_eq!(truncate_output("hello", 10), "hello"); + let long = "a".repeat(3000); + let truncated = truncate_output(&long, 2000); + assert!(truncated.contains("[truncated")); + assert!(truncated.len() < 2100); + } + + #[test] + fn test_evaluate_tool_name() { + let t = DevkitEvaluateTool; + assert_eq!(t.name(), "devkit_evaluate"); + } +} diff --git a/src/mcp/tools/mod.rs b/src/mcp/tools/mod.rs index 77586fc..264a7de 100644 --- a/src/mcp/tools/mod.rs +++ b/src/mcp/tools/mod.rs @@ -1,6 +1,7 @@ // SPDX-License-Identifier: MIT // Copyright (c) 2026 juice094 pub mod context; +pub mod evaluate; pub mod known_limit; pub mod oplog; pub mod query; @@ -28,6 +29,7 @@ pub use vault::*; pub use workflow::*; pub use code_analysis::*; +pub use evaluate::*; pub use external::*; pub use knowledge::*; pub use search::*; From 7e96947036b5ab47c4fe2c7c62d9880caa0ae402 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 20:46:36 +0800 Subject: [PATCH 15/17] =?UTF-8?q?feat(agent-contexts):=20v0.16.0=20P1=20?= =?UTF-8?q?=E2=80=94=20persistent=20AI=20sessions=20with=20typed=20memorie?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Schema v31: agent_contexts + agent_memories tables (v31_agent_contexts.rs) - Registry CRUD: upsert/list/get/archive/delete contexts, insert/list memories - MCP tools: devkit_session_save, devkit_session_list, devkit_session_resume - Tests: 5 unit tests for agent_context.rs, 1 integration test for tool list - Docs: AGENTS.md tool count updated 49 → 52 - Clippy clean, cargo fmt clean, 441 passed / 0 failed --- AGENTS.md | 2 +- src/mcp/mod.rs | 18 + src/mcp/tests.rs | 5 +- src/mcp/tools/mod.rs | 5 + src/mcp/tools/session.rs | 279 ++++++++++++++++ src/registry.rs | 1 + src/registry/agent_context.rs | 307 ++++++++++++++++++ src/registry/migrate.rs | 2 +- src/registry/migrations/mod.rs | 4 + src/registry/migrations/v31_agent_contexts.rs | 37 +++ src/registry/test_helpers.rs | 45 +++ 11 files changed, 702 insertions(+), 3 deletions(-) create mode 100644 src/mcp/tools/session.rs create mode 100644 src/registry/agent_context.rs create mode 100644 src/registry/migrations/v31_agent_contexts.rs diff --git a/AGENTS.md b/AGENTS.md index e01dc12..204068f 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -22,7 +22,7 @@ Skill Runtime 全生命周期已落地(含依赖管理 Schema v15),Schema - **Workspace**:`%LOCALAPPDATA%\devbase\workspace/` —— 文件系统 = source of truth - `vault/` —— PARA 结构:00-Inbox, 01-Projects, 02-Areas, 03-Resources, 04-Archives, 99-Meta - `assets/` —— 二进制资源 -- **MCP Server**:stdio only,**49 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` +- **MCP Server**:stdio only,**52 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 3 个 Agent Context tools + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` - **Kimi CLI 集成**:MCP server 已通过 `kimi mcp add` 注册,端到端验证通过(`kimi --print` 成功调用 `devkit_health`);项目级 skill 位于 `.kimi/skills/devbase-project/SKILL.md` - **统一节点模型**:`core::node::{Node, NodeType, Edge}` —— GitRepo / VaultNote / Asset / ExternalLink - **当前测试**:490+ workspace passed / 0 failed / 4 ignored(主 crate 390 + symbol-links 4 + sync-protocol 12 + core-types 3 + syncthing-client 2 + vault-frontmatter 5 + vault-wikilink 5 + workflow-interpolate 9 + workflow-model 2 + registry-health 3 + registry-metrics 4 + registry-workspace 5 + embedding 5 + skill-runtime-types 7 + skill-runtime-parser 3 + 其他 crates ~30);11/11 passed(integration `tests/cli.rs`) diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs index dcc6785..4527a71 100644 --- a/src/mcp/mod.rs +++ b/src/mcp/mod.rs @@ -100,6 +100,9 @@ pub enum McpToolEnum { RelationStore(DevkitRelationStoreTool), RelationQuery(DevkitRelationQueryTool), RelationDelete(DevkitRelationDeleteTool), + SessionSave(DevkitSessionSaveTool), + SessionList(DevkitSessionListTool), + SessionResume(DevkitSessionResumeTool), WorkflowList(DevkitWorkflowListTool), WorkflowRun(DevkitWorkflowRunTool), WorkflowStatus(DevkitWorkflowStatusTool), @@ -177,6 +180,9 @@ impl McpToolEnum { McpToolEnum::RelationStore(_) => ToolTier::Beta, McpToolEnum::RelationQuery(_) => ToolTier::Beta, McpToolEnum::RelationDelete(_) => ToolTier::Beta, + McpToolEnum::SessionSave(_) => ToolTier::Beta, + McpToolEnum::SessionList(_) => ToolTier::Beta, + McpToolEnum::SessionResume(_) => ToolTier::Beta, McpToolEnum::WorkflowList(_) => ToolTier::Beta, McpToolEnum::WorkflowRun(_) => ToolTier::Beta, McpToolEnum::WorkflowStatus(_) => ToolTier::Beta, @@ -233,6 +239,9 @@ impl McpTool for McpToolEnum { McpToolEnum::RelationStore(t) => t.name(), McpToolEnum::RelationQuery(t) => t.name(), McpToolEnum::RelationDelete(t) => t.name(), + McpToolEnum::SessionSave(t) => t.name(), + McpToolEnum::SessionList(t) => t.name(), + McpToolEnum::SessionResume(t) => t.name(), McpToolEnum::WorkflowList(t) => t.name(), McpToolEnum::WorkflowRun(t) => t.name(), McpToolEnum::WorkflowStatus(t) => t.name(), @@ -287,6 +296,9 @@ impl McpTool for McpToolEnum { McpToolEnum::RelationStore(t) => t.schema(), McpToolEnum::RelationQuery(t) => t.schema(), McpToolEnum::RelationDelete(t) => t.schema(), + McpToolEnum::SessionSave(t) => t.schema(), + McpToolEnum::SessionList(t) => t.schema(), + McpToolEnum::SessionResume(t) => t.schema(), McpToolEnum::WorkflowList(t) => t.schema(), McpToolEnum::WorkflowRun(t) => t.schema(), McpToolEnum::WorkflowStatus(t) => t.schema(), @@ -345,6 +357,9 @@ impl McpTool for McpToolEnum { McpToolEnum::RelationStore(t) => t.invoke(args, ctx).await, McpToolEnum::RelationQuery(t) => t.invoke(args, ctx).await, McpToolEnum::RelationDelete(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionSave(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionList(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionResume(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowList(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowRun(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowStatus(t) => t.invoke(args, ctx).await, @@ -593,6 +608,9 @@ pub fn build_server_with_tiers(tiers: Option<&HashSet>) -> McpServer { McpToolEnum::RelationStore(DevkitRelationStoreTool), McpToolEnum::RelationQuery(DevkitRelationQueryTool), McpToolEnum::RelationDelete(DevkitRelationDeleteTool), + McpToolEnum::SessionSave(DevkitSessionSaveTool), + McpToolEnum::SessionList(DevkitSessionListTool), + McpToolEnum::SessionResume(DevkitSessionResumeTool), McpToolEnum::WorkflowList(DevkitWorkflowListTool), McpToolEnum::WorkflowRun(DevkitWorkflowRunTool), McpToolEnum::WorkflowStatus(DevkitWorkflowStatusTool), diff --git a/src/mcp/tests.rs b/src/mcp/tests.rs index 2d199ba..c88bdbb 100644 --- a/src/mcp/tests.rs +++ b/src/mcp/tests.rs @@ -39,8 +39,11 @@ async fn test_tools_list() { let (mut ctx, _tmp) = test_ctx(); let resp = server.handle_request(req, &mut ctx).await.unwrap(); let tools = resp.get("result").unwrap().get("tools").unwrap().as_array().unwrap(); - assert_eq!(tools.len(), 49); + assert_eq!(tools.len(), 52); let names: Vec<&str> = tools.iter().map(|t| t.get("name").unwrap().as_str().unwrap()).collect(); + assert!(names.contains(&"devkit_session_save")); + assert!(names.contains(&"devkit_session_list")); + assert!(names.contains(&"devkit_session_resume")); assert!(names.contains(&"devkit_evaluate")); assert!(names.contains(&"devkit_scan")); assert!(names.contains(&"devkit_health")); diff --git a/src/mcp/tools/mod.rs b/src/mcp/tools/mod.rs index 264a7de..c3b0de8 100644 --- a/src/mcp/tools/mod.rs +++ b/src/mcp/tools/mod.rs @@ -7,6 +7,7 @@ pub mod oplog; pub mod query; pub mod relations; pub mod repo; +pub mod session; pub mod skill; pub mod status; pub mod vault; @@ -23,6 +24,7 @@ pub use oplog::*; pub use query::*; pub use relations::*; pub use repo::*; +pub use session::*; pub use skill::*; pub use status::*; pub use vault::*; @@ -50,5 +52,8 @@ mod tests { let _ = super::vault::DevkitVaultDailyTool; let _ = super::vault::DevkitVaultGraphTool; let _ = super::workflow::DevkitWorkflowListTool; + let _ = super::session::DevkitSessionSaveTool; + let _ = super::session::DevkitSessionListTool; + let _ = super::session::DevkitSessionResumeTool; } } diff --git a/src/mcp/tools/session.rs b/src/mcp/tools/session.rs new file mode 100644 index 0000000..2407bb7 --- /dev/null +++ b/src/mcp/tools/session.rs @@ -0,0 +1,279 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2026 juice094 +//! MCP tools for Agent Context management (P1: Claude Projects inspired sessions). + +use crate::mcp::McpTool; +use crate::storage::AppContext; +use serde_json::json; + +#[derive(Clone)] +pub struct DevkitSessionSaveTool; + +impl McpTool for DevkitSessionSaveTool { + fn name(&self) -> &'static str { + "devkit_session_save" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Save or update an AI agent session context with optional memories. + +Use this when the user wants to: +- Create a new persistent project context +- Update an existing session's name, intent, or append memories +- Checkpoint current conversation state for later resumption + +Parameters: +- context_id: Unique session identifier (e.g., "project-alpha", "sprint-29"). +- name: Human-readable session name. +- intent: Optional high-level goal or project description. +- memories: Optional array of {type, content} objects to append. Types: decision, constraint, note, discovery, error."#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { "type": "string", "description": "Unique session ID" }, + "name": { "type": "string", "description": "Human-readable name" }, + "intent": { "type": "string", "description": "High-level goal / project description" }, + "memories": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { "type": "string", "description": "Memory type: decision, constraint, note, discovery, error" }, + "content": { "type": "string", "description": "Memory content" } + }, + "required": ["type", "content"] + } + } + }, + "required": ["context_id", "name"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let context_id = args.get("context_id").and_then(|v| v.as_str()).unwrap_or(""); + let name = args.get("name").and_then(|v| v.as_str()).unwrap_or(""); + let intent = args.get("intent").and_then(|v| v.as_str()); + if context_id.is_empty() { + anyhow::bail!("Missing required argument: context_id"); + } + if name.is_empty() { + anyhow::bail!("Missing required argument: name"); + } + + let mut conn = ctx.conn_mut()?; + crate::registry::agent_context::upsert_context(&mut conn, context_id, name, intent)?; + + let mut memory_count = 0; + if let Some(memories) = args.get("memories").and_then(|v| v.as_array()) { + for mem in memories { + let ty = mem.get("type").and_then(|v| v.as_str()).unwrap_or("note"); + let content = mem.get("content").and_then(|v| v.as_str()).unwrap_or(""); + if !content.is_empty() { + crate::registry::agent_context::insert_memory( + &mut conn, context_id, ty, content, + )?; + memory_count += 1; + } + } + } + + Ok(json!({ + "success": true, + "context_id": context_id, + "name": name, + "memories_added": memory_count + })) + } +} + +#[derive(Clone)] +pub struct DevkitSessionListTool; + +impl McpTool for DevkitSessionListTool { + fn name(&self) -> &'static str { + "devkit_session_list" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"List persisted AI agent sessions (contexts). + +Use this when the user wants to: +- See all active or archived sessions +- Find a session to resume +- Audit past project contexts"#, + "inputSchema": { + "type": "object", + "properties": { + "status_filter": { + "type": "string", + "enum": ["active", "archived"], + "description": "Filter by status. Omit for all." + }, + "limit": { + "type": "integer", + "description": "Maximum results", + "default": 50 + } + } + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let status_filter = args.get("status_filter").and_then(|v| v.as_str()); + let limit = args.get("limit").and_then(|v| v.as_u64()).unwrap_or(50) as usize; + + let conn = ctx.conn()?; + let contexts = crate::registry::agent_context::list_contexts(&conn)?; + let results: Vec = contexts + .into_iter() + .filter(|c| status_filter.is_none_or(|f| c.status == f)) + .take(limit) + .map(|c| { + json!({ + "context_id": c.id, + "name": c.name, + "intent": c.intent, + "status": c.status, + "updated_at": c.updated_at.to_rfc3339(), + }) + }) + .collect(); + + Ok(json!({ + "success": true, + "count": results.len(), + "contexts": results + })) + } +} + +#[derive(Clone)] +pub struct DevkitSessionResumeTool; + +impl McpTool for DevkitSessionResumeTool { + fn name(&self) -> &'static str { + "devkit_session_resume" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Resume a persisted AI agent session, returning its metadata and memories. + +Use this when the user wants to: +- Restore a previous project context +- Continue work from a checkpointed session +- Review all decisions and constraints stored in a session"#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { + "type": "string", + "description": "Session ID to resume" + }, + "include_memories": { + "type": "boolean", + "description": "Include associated memories", + "default": true + }, + "memory_types": { + "type": "array", + "items": { "type": "string" }, + "description": "Optional filter for memory types (e.g. [\"decision\", \"constraint\"])" + } + }, + "required": ["context_id"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let context_id = args.get("context_id").and_then(|v| v.as_str()).unwrap_or(""); + if context_id.is_empty() { + anyhow::bail!("Missing required argument: context_id"); + } + let include_memories = + args.get("include_memories").and_then(|v| v.as_bool()).unwrap_or(true); + let memory_types: Option> = args + .get("memory_types") + .and_then(|v| v.as_array()) + .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect()); + + let conn = ctx.conn()?; + match crate::registry::agent_context::get_context_with_memories(&conn, context_id)? { + Some((ctx_info, mut memories)) => { + if let Some(types) = memory_types { + let type_set: std::collections::HashSet = types.into_iter().collect(); + memories.retain(|m| type_set.contains(&m.memory_type)); + } + let memory_json: Vec = if include_memories { + memories + .into_iter() + .map(|m| { + json!({ + "id": m.id, + "type": m.memory_type, + "content": m.content, + "created_at": m.created_at.to_rfc3339(), + }) + }) + .collect() + } else { + vec![] + }; + Ok(json!({ + "success": true, + "context": { + "context_id": ctx_info.id, + "name": ctx_info.name, + "intent": ctx_info.intent, + "status": ctx_info.status, + "created_at": ctx_info.created_at.to_rfc3339(), + "updated_at": ctx_info.updated_at.to_rfc3339(), + }, + "memories": memory_json, + "memory_count": memory_json.len(), + })) + } + None => Ok(json!({ + "success": false, + "error": format!("Session '{}' not found", context_id) + })), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::mcp::McpTool; + + #[test] + fn test_session_tool_names() { + assert_eq!(DevkitSessionSaveTool.name(), "devkit_session_save"); + assert_eq!(DevkitSessionListTool.name(), "devkit_session_list"); + assert_eq!(DevkitSessionResumeTool.name(), "devkit_session_resume"); + } + + #[test] + fn test_schemas_are_objects() { + assert!(DevkitSessionSaveTool.schema().is_object()); + assert!(DevkitSessionListTool.schema().is_object()); + assert!(DevkitSessionResumeTool.schema().is_object()); + } +} diff --git a/src/registry.rs b/src/registry.rs index 971e610..4f9371b 100644 --- a/src/registry.rs +++ b/src/registry.rs @@ -111,6 +111,7 @@ pub use entity::{ ENTITY_TYPE_WORKFLOW, upsert_entity, }; +pub mod agent_context; pub mod call_graph; pub mod code_symbols; pub mod dead_code; diff --git a/src/registry/agent_context.rs b/src/registry/agent_context.rs new file mode 100644 index 0000000..2673a6b --- /dev/null +++ b/src/registry/agent_context.rs @@ -0,0 +1,307 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2026 juice094 +//! Agent Context registry: CRUD for agent_contexts and agent_memories tables. +//! +//! Provides persistent AI session contexts (Claude Projects inspired) with +//! associated typed memories. All operations are transactional where needed. + +use chrono::{DateTime, Utc}; +use rusqlite::{Connection, OptionalExtension}; + +/// A persisted AI agent context (session / project scope). +#[derive(Debug, Clone, PartialEq)] +pub struct AgentContext { + pub id: String, + pub name: String, + pub intent: Option, + pub status: String, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +/// A typed memory entry attached to an AgentContext. +#[derive(Debug, Clone, PartialEq)] +pub struct AgentMemory { + pub id: i64, + pub context_id: String, + pub memory_type: String, + pub content: String, + pub created_at: DateTime, +} + +// --------------------------------------------------------------------------- +// Context CRUD +// --------------------------------------------------------------------------- + +/// Insert or replace an agent context. +pub fn upsert_context( + conn: &mut Connection, + id: &str, + name: &str, + intent: Option<&str>, +) -> anyhow::Result<()> { + let tx = conn.transaction()?; + let now = Utc::now().to_rfc3339(); + tx.execute( + "INSERT INTO agent_contexts (id, name, intent, status, created_at, updated_at) + VALUES (?1, ?2, ?3, 'active', ?4, ?4) + ON CONFLICT(id) DO UPDATE SET + name = excluded.name, + intent = excluded.intent, + status = 'active', + updated_at = excluded.updated_at", + rusqlite::params![id, name, intent, now], + )?; + tx.commit()?; + Ok(()) +} + +/// List all contexts ordered by most recently updated. +pub fn list_contexts(conn: &Connection) -> anyhow::Result> { + let mut stmt = conn.prepare( + "SELECT id, name, intent, status, created_at, updated_at + FROM agent_contexts + ORDER BY updated_at DESC", + )?; + let rows = stmt.query_map([], |row| { + let created_at = parse_datetime(row.get(4)?).map_err(|e| { + rusqlite::Error::FromSqlConversionFailure( + 4, + rusqlite::types::Type::Text, + Box::new(std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())), + ) + })?; + let updated_at = parse_datetime(row.get(5)?).map_err(|e| { + rusqlite::Error::FromSqlConversionFailure( + 5, + rusqlite::types::Type::Text, + Box::new(std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())), + ) + })?; + Ok(AgentContext { + id: row.get(0)?, + name: row.get(1)?, + intent: row.get(2)?, + status: row.get(3)?, + created_at, + updated_at, + }) + })?; + rows.collect::, _>>().map_err(Into::into) +} + +/// Get a single context by ID. +pub fn get_context(conn: &Connection, id: &str) -> anyhow::Result> { + let mut stmt = conn.prepare( + "SELECT id, name, intent, status, created_at, updated_at + FROM agent_contexts + WHERE id = ?1", + )?; + stmt.query_row([id], |row| { + let created_at = parse_datetime(row.get(4)?).map_err(|e| { + rusqlite::Error::FromSqlConversionFailure( + 4, + rusqlite::types::Type::Text, + Box::new(std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())), + ) + })?; + let updated_at = parse_datetime(row.get(5)?).map_err(|e| { + rusqlite::Error::FromSqlConversionFailure( + 5, + rusqlite::types::Type::Text, + Box::new(std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())), + ) + })?; + Ok(AgentContext { + id: row.get(0)?, + name: row.get(1)?, + intent: row.get(2)?, + status: row.get(3)?, + created_at, + updated_at, + }) + }) + .optional() + .map_err(Into::into) +} + +/// Get a context together with all its memories. +pub fn get_context_with_memories( + conn: &Connection, + id: &str, +) -> anyhow::Result)>> { + let ctx = match get_context(conn, id)? { + Some(c) => c, + None => return Ok(None), + }; + let memories = list_memories(conn, id)?; + Ok(Some((ctx, memories))) +} + +/// Archive a context (soft-delete via status change). +pub fn archive_context(conn: &mut Connection, id: &str) -> anyhow::Result { + let tx = conn.transaction()?; + let now = Utc::now().to_rfc3339(); + let rows = tx.execute( + "UPDATE agent_contexts SET status = 'archived', updated_at = ?1 WHERE id = ?2", + rusqlite::params![now, id], + )?; + tx.commit()?; + Ok(rows > 0) +} + +/// Hard-delete a context and cascade-delete its memories. +pub fn delete_context(conn: &mut Connection, id: &str) -> anyhow::Result { + let tx = conn.transaction()?; + let rows = tx.execute("DELETE FROM agent_contexts WHERE id = ?1", [id])?; + tx.commit()?; + Ok(rows > 0) +} + +// --------------------------------------------------------------------------- +// Memory CRUD +// --------------------------------------------------------------------------- + +/// Insert a memory and return its auto-generated row id. +pub fn insert_memory( + conn: &mut Connection, + context_id: &str, + memory_type: &str, + content: &str, +) -> anyhow::Result { + let tx = conn.transaction()?; + tx.execute( + "INSERT INTO agent_memories (context_id, memory_type, content, created_at) + VALUES (?1, ?2, ?3, ?4)", + rusqlite::params![context_id, memory_type, content, Utc::now().to_rfc3339()], + )?; + let id = tx.last_insert_rowid(); + tx.commit()?; + Ok(id) +} + +/// List memories for a context, newest first. +pub fn list_memories(conn: &Connection, context_id: &str) -> anyhow::Result> { + let mut stmt = conn.prepare( + "SELECT id, context_id, memory_type, content, created_at + FROM agent_memories + WHERE context_id = ?1 + ORDER BY created_at DESC", + )?; + let rows = stmt.query_map([context_id], |row| { + let created_at = parse_datetime(row.get(4)?).map_err(|e| { + rusqlite::Error::FromSqlConversionFailure( + 4, + rusqlite::types::Type::Text, + Box::new(std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())), + ) + })?; + Ok(AgentMemory { + id: row.get(0)?, + context_id: row.get(1)?, + memory_type: row.get(2)?, + content: row.get(3)?, + created_at, + }) + })?; + rows.collect::, _>>().map_err(Into::into) +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +fn parse_datetime(s: String) -> anyhow::Result> { + DateTime::parse_from_rfc3339(&s) + .map(|dt| dt.with_timezone(&Utc)) + .or_else(|_| { + chrono::NaiveDateTime::parse_from_str(&s, "%Y-%m-%d %H:%M:%S") + .map(|ndt| DateTime::from_naive_utc_and_offset(ndt, Utc)) + }) + .map_err(|e| anyhow::anyhow!("Invalid datetime '{}': {}", s, e)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::registry::test_helpers::WorkspaceRegistry; + + #[test] + fn test_context_crud() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + + // Create + upsert_context(&mut conn, "ctx-1", "Project Alpha", Some("Rewrite auth layer")).unwrap(); + let ctx = get_context(&conn, "ctx-1").unwrap().expect("context exists"); + assert_eq!(ctx.name, "Project Alpha"); + assert_eq!(ctx.intent.as_deref(), Some("Rewrite auth layer")); + assert_eq!(ctx.status, "active"); + + // Update + upsert_context(&mut conn, "ctx-1", "Project Alpha+", Some("Rewrite auth + RBAC")).unwrap(); + let ctx2 = get_context(&conn, "ctx-1").unwrap().expect("context still exists"); + assert_eq!(ctx2.name, "Project Alpha+"); + + // List + let list = list_contexts(&conn).unwrap(); + assert_eq!(list.len(), 1); + + // Archive + assert!(archive_context(&mut conn, "ctx-1").unwrap()); + let archived = get_context(&conn, "ctx-1").unwrap().expect("context not deleted"); + assert_eq!(archived.status, "archived"); + + // Delete + assert!(delete_context(&mut conn, "ctx-1").unwrap()); + assert!(get_context(&conn, "ctx-1").unwrap().is_none()); + } + + #[test] + fn test_memory_crud() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + upsert_context(&mut conn, "ctx-mem", "Test", None).unwrap(); + + let id1 = insert_memory(&mut conn, "ctx-mem", "decision", "Use SQLite").unwrap(); + let id2 = insert_memory(&mut conn, "ctx-mem", "constraint", "Must be <50ms").unwrap(); + assert!(id1 > 0); + assert!(id2 > 0); + + let memories = list_memories(&conn, "ctx-mem").unwrap(); + assert_eq!(memories.len(), 2); + // Newest first + assert_eq!(memories[0].memory_type, "constraint"); + assert_eq!(memories[1].memory_type, "decision"); + } + + #[test] + fn test_get_context_with_memories() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + upsert_context(&mut conn, "ctx-full", "Full", Some("intent")).unwrap(); + insert_memory(&mut conn, "ctx-full", "note", "content").unwrap(); + + let result = get_context_with_memories(&conn, "ctx-full").unwrap(); + assert!(result.is_some()); + let (ctx, mems) = result.unwrap(); + assert_eq!(ctx.name, "Full"); + assert_eq!(mems.len(), 1); + assert_eq!(mems[0].content, "content"); + } + + #[test] + fn test_missing_context() { + let conn = WorkspaceRegistry::init_in_memory().unwrap(); + assert!(get_context(&conn, "nope").unwrap().is_none()); + assert!(get_context_with_memories(&conn, "nope").unwrap().is_none()); + } + + #[test] + fn test_cascade_delete() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + upsert_context(&mut conn, "ctx-cascade", "Cascade", None).unwrap(); + insert_memory(&mut conn, "ctx-cascade", "t", "data").unwrap(); + + delete_context(&mut conn, "ctx-cascade").unwrap(); + let mems = list_memories(&conn, "ctx-cascade").unwrap(); + assert!(mems.is_empty()); + } +} diff --git a/src/registry/migrate.rs b/src/registry/migrate.rs index 1a9dc4c..efb85e3 100644 --- a/src/registry/migrate.rs +++ b/src/registry/migrate.rs @@ -4,7 +4,7 @@ use super::*; use crate::storage::StorageBackend; use std::path::PathBuf; -pub const CURRENT_SCHEMA_VERSION: i32 = 30; +pub const CURRENT_SCHEMA_VERSION: i32 = 31; impl WorkspaceRegistry { pub fn db_path() -> anyhow::Result { diff --git a/src/registry/migrations/mod.rs b/src/registry/migrations/mod.rs index f922d11..aafd75e 100644 --- a/src/registry/migrations/mod.rs +++ b/src/registry/migrations/mod.rs @@ -32,6 +32,7 @@ pub mod v27_repo_index_state; pub mod v28_embedding_precision; pub mod v29_compensation_log; pub mod v30_code_symbol_attributes; +pub mod v31_agent_contexts; pub fn run_all(conn: &mut Connection) -> anyhow::Result<()> { let user_version: i32 = conn.query_row("PRAGMA user_version", [], |row| row.get(0))?; @@ -126,6 +127,9 @@ pub fn run_all(conn: &mut Connection) -> anyhow::Result<()> { if user_version < 30 { v30_code_symbol_attributes::run(conn)?; } + if user_version < 31 { + v31_agent_contexts::run(conn)?; + } Ok(()) } diff --git a/src/registry/migrations/v31_agent_contexts.rs b/src/registry/migrations/v31_agent_contexts.rs new file mode 100644 index 0000000..bbb4ccd --- /dev/null +++ b/src/registry/migrations/v31_agent_contexts.rs @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2026 juice094 +use rusqlite::Connection; + +pub fn run(conn: &Connection) -> anyhow::Result<()> { + conn.execute( + "CREATE TABLE IF NOT EXISTS agent_contexts ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + intent TEXT, + status TEXT DEFAULT 'active', + created_at DATETIME DEFAULT current_timestamp, + updated_at DATETIME DEFAULT current_timestamp + )", + [], + )?; + + conn.execute( + "CREATE TABLE IF NOT EXISTS agent_memories ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + context_id TEXT NOT NULL, + memory_type TEXT NOT NULL, + content TEXT NOT NULL, + created_at DATETIME DEFAULT current_timestamp, + FOREIGN KEY (context_id) REFERENCES agent_contexts(id) ON DELETE CASCADE + )", + [], + )?; + + conn.execute( + "CREATE INDEX IF NOT EXISTS idx_agent_memories_context ON agent_memories(context_id)", + [], + )?; + + conn.execute("PRAGMA user_version = 31", [])?; + Ok(()) +} diff --git a/src/registry/test_helpers.rs b/src/registry/test_helpers.rs index ec57e55..a23584f 100644 --- a/src/registry/test_helpers.rs +++ b/src/registry/test_helpers.rs @@ -362,6 +362,25 @@ CREATE TABLE IF NOT EXISTS repo_index_state ( last_commit_hash TEXT, indexed_at DATETIME DEFAULT current_timestamp ); + +CREATE TABLE IF NOT EXISTS agent_contexts ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + intent TEXT, + status TEXT DEFAULT 'active', + created_at DATETIME DEFAULT current_timestamp, + updated_at DATETIME DEFAULT current_timestamp +); + +CREATE TABLE IF NOT EXISTS agent_memories ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + context_id TEXT NOT NULL, + memory_type TEXT NOT NULL, + content TEXT NOT NULL, + created_at DATETIME DEFAULT current_timestamp, + FOREIGN KEY (context_id) REFERENCES agent_contexts(id) ON DELETE CASCADE +); +CREATE INDEX IF NOT EXISTS idx_agent_memories_context ON agent_memories(context_id); "#; #[cfg(test)] @@ -413,4 +432,30 @@ mod tests { .unwrap_or(false); assert!(exists, "knowledge_meta table must exist in current schema"); } + + #[test] + fn test_agent_contexts_table_exists() { + let conn = WorkspaceRegistry::init_in_memory().unwrap(); + let exists: bool = conn + .query_row( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='agent_contexts'", + [], + |_| Ok(true), + ) + .unwrap_or(false); + assert!(exists, "agent_contexts table must exist in current schema"); + } + + #[test] + fn test_agent_memories_table_exists() { + let conn = WorkspaceRegistry::init_in_memory().unwrap(); + let exists: bool = conn + .query_row( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name='agent_memories'", + [], + |_| Ok(true), + ) + .unwrap_or(false); + assert!(exists, "agent_memories table must exist in current schema"); + } } From 60bb8ed1291a15e65f3b28c1a9fb6db02c4c9099 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 21:10:47 +0800 Subject: [PATCH 16/17] =?UTF-8?q?feat(agent-contexts):=20v0.16.0=20P2/P2?= =?UTF-8?q?=20=E2=80=94=20context-aware=20execution=20+=20memory=20intelli?= =?UTF-8?q?gence?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit P2-A: Entity attachment layer - Schema v32: context_entity_links table (v32_context_links.rs) - Registry CRUD: attach_entity, detach_entity, list_linked_entities, list_linking_contexts - MCP tools: devkit_session_attach, devkit_session_detach - Resume tool now returns linked_entities P2-B: Skill Runtime context injection - resolve_active_context() reads DEVBASE_ACTIVE_CONTEXT env or .active_context state file - run_skill injects DEVBASE_CONTEXT_MEMORIES + DEVBASE_CONTEXT_LINKS when active - MCP tool: devkit_session_activate P2-C: Memory search - agent_context::search_memories with SQLite LIKE (context-scoped or global) - MCP tool: devkit_session_search P3-A: Auto-capture - MCP tool: devkit_session_capture (lightweight append-only memory recording) - Falls back to activated session if context_id omitted P3-B: Audit trail integration - Extended OplogEventType::AgentContext in devbase-registry-workspace crate - All mutating agent_context operations write to oplog (upsert, memory, attach, detach, archive, delete) Quality: clippy 0 warnings, fmt clean, 442 passed / 0 failed --- AGENTS.md | 2 +- crates/devbase-registry-workspace/src/lib.rs | 3 + src/mcp/mod.rs | 30 ++ src/mcp/tests.rs | 2 +- src/mcp/tools/mod.rs | 5 + src/mcp/tools/session.rs | 338 +++++++++++++++++++ src/registry/agent_context.rs | 178 ++++++++++ src/registry/migrate.rs | 2 +- src/registry/migrations/mod.rs | 4 + src/registry/migrations/v32_context_links.rs | 22 ++ src/registry/test_helpers.rs | 9 + src/skill_runtime/executor.rs | 55 +++ 12 files changed, 647 insertions(+), 3 deletions(-) create mode 100644 src/registry/migrations/v32_context_links.rs diff --git a/AGENTS.md b/AGENTS.md index 204068f..5486e56 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -22,7 +22,7 @@ Skill Runtime 全生命周期已落地(含依赖管理 Schema v15),Schema - **Workspace**:`%LOCALAPPDATA%\devbase\workspace/` —— 文件系统 = source of truth - `vault/` —— PARA 结构:00-Inbox, 01-Projects, 02-Areas, 03-Resources, 04-Archives, 99-Meta - `assets/` —— 二进制资源 -- **MCP Server**:stdio only,**52 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 3 个 Agent Context tools + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` +- **MCP Server**:stdio only,**57 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 8 个 Agent Context tools + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` - **Kimi CLI 集成**:MCP server 已通过 `kimi mcp add` 注册,端到端验证通过(`kimi --print` 成功调用 `devkit_health`);项目级 skill 位于 `.kimi/skills/devbase-project/SKILL.md` - **统一节点模型**:`core::node::{Node, NodeType, Edge}` —— GitRepo / VaultNote / Asset / ExternalLink - **当前测试**:490+ workspace passed / 0 failed / 4 ignored(主 crate 390 + symbol-links 4 + sync-protocol 12 + core-types 3 + syncthing-client 2 + vault-frontmatter 5 + vault-wikilink 5 + workflow-interpolate 9 + workflow-model 2 + registry-health 3 + registry-metrics 4 + registry-workspace 5 + embedding 5 + skill-runtime-types 7 + skill-runtime-parser 3 + 其他 crates ~30);11/11 passed(integration `tests/cli.rs`) diff --git a/crates/devbase-registry-workspace/src/lib.rs b/crates/devbase-registry-workspace/src/lib.rs index fe6b9a2..496a093 100644 --- a/crates/devbase-registry-workspace/src/lib.rs +++ b/crates/devbase-registry-workspace/src/lib.rs @@ -18,6 +18,7 @@ pub enum OplogEventType { Index, HealthCheck, KnownLimit, + AgentContext, } impl OplogEventType { @@ -28,6 +29,7 @@ impl OplogEventType { OplogEventType::Index => "index", OplogEventType::HealthCheck => "health_check", OplogEventType::KnownLimit => "known_limit", + OplogEventType::AgentContext => "agent_context", } } } @@ -42,6 +44,7 @@ impl std::str::FromStr for OplogEventType { "health_check" => Ok(OplogEventType::HealthCheck), "health" => Ok(OplogEventType::HealthCheck), "known_limit" => Ok(OplogEventType::KnownLimit), + "agent_context" => Ok(OplogEventType::AgentContext), _ => Err(()), } } diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs index 4527a71..b49b91e 100644 --- a/src/mcp/mod.rs +++ b/src/mcp/mod.rs @@ -103,6 +103,11 @@ pub enum McpToolEnum { SessionSave(DevkitSessionSaveTool), SessionList(DevkitSessionListTool), SessionResume(DevkitSessionResumeTool), + SessionAttach(DevkitSessionAttachTool), + SessionDetach(DevkitSessionDetachTool), + SessionActivate(DevkitSessionActivateTool), + SessionSearch(DevkitSessionSearchTool), + SessionCapture(DevkitSessionCaptureTool), WorkflowList(DevkitWorkflowListTool), WorkflowRun(DevkitWorkflowRunTool), WorkflowStatus(DevkitWorkflowStatusTool), @@ -183,6 +188,11 @@ impl McpToolEnum { McpToolEnum::SessionSave(_) => ToolTier::Beta, McpToolEnum::SessionList(_) => ToolTier::Beta, McpToolEnum::SessionResume(_) => ToolTier::Beta, + McpToolEnum::SessionAttach(_) => ToolTier::Beta, + McpToolEnum::SessionDetach(_) => ToolTier::Beta, + McpToolEnum::SessionActivate(_) => ToolTier::Beta, + McpToolEnum::SessionSearch(_) => ToolTier::Beta, + McpToolEnum::SessionCapture(_) => ToolTier::Beta, McpToolEnum::WorkflowList(_) => ToolTier::Beta, McpToolEnum::WorkflowRun(_) => ToolTier::Beta, McpToolEnum::WorkflowStatus(_) => ToolTier::Beta, @@ -242,6 +252,11 @@ impl McpTool for McpToolEnum { McpToolEnum::SessionSave(t) => t.name(), McpToolEnum::SessionList(t) => t.name(), McpToolEnum::SessionResume(t) => t.name(), + McpToolEnum::SessionAttach(t) => t.name(), + McpToolEnum::SessionDetach(t) => t.name(), + McpToolEnum::SessionActivate(t) => t.name(), + McpToolEnum::SessionSearch(t) => t.name(), + McpToolEnum::SessionCapture(t) => t.name(), McpToolEnum::WorkflowList(t) => t.name(), McpToolEnum::WorkflowRun(t) => t.name(), McpToolEnum::WorkflowStatus(t) => t.name(), @@ -299,6 +314,11 @@ impl McpTool for McpToolEnum { McpToolEnum::SessionSave(t) => t.schema(), McpToolEnum::SessionList(t) => t.schema(), McpToolEnum::SessionResume(t) => t.schema(), + McpToolEnum::SessionAttach(t) => t.schema(), + McpToolEnum::SessionDetach(t) => t.schema(), + McpToolEnum::SessionActivate(t) => t.schema(), + McpToolEnum::SessionSearch(t) => t.schema(), + McpToolEnum::SessionCapture(t) => t.schema(), McpToolEnum::WorkflowList(t) => t.schema(), McpToolEnum::WorkflowRun(t) => t.schema(), McpToolEnum::WorkflowStatus(t) => t.schema(), @@ -360,6 +380,11 @@ impl McpTool for McpToolEnum { McpToolEnum::SessionSave(t) => t.invoke(args, ctx).await, McpToolEnum::SessionList(t) => t.invoke(args, ctx).await, McpToolEnum::SessionResume(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionAttach(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionDetach(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionActivate(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionSearch(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionCapture(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowList(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowRun(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowStatus(t) => t.invoke(args, ctx).await, @@ -611,6 +636,11 @@ pub fn build_server_with_tiers(tiers: Option<&HashSet>) -> McpServer { McpToolEnum::SessionSave(DevkitSessionSaveTool), McpToolEnum::SessionList(DevkitSessionListTool), McpToolEnum::SessionResume(DevkitSessionResumeTool), + McpToolEnum::SessionAttach(DevkitSessionAttachTool), + McpToolEnum::SessionDetach(DevkitSessionDetachTool), + McpToolEnum::SessionActivate(DevkitSessionActivateTool), + McpToolEnum::SessionSearch(DevkitSessionSearchTool), + McpToolEnum::SessionCapture(DevkitSessionCaptureTool), McpToolEnum::WorkflowList(DevkitWorkflowListTool), McpToolEnum::WorkflowRun(DevkitWorkflowRunTool), McpToolEnum::WorkflowStatus(DevkitWorkflowStatusTool), diff --git a/src/mcp/tests.rs b/src/mcp/tests.rs index c88bdbb..e925136 100644 --- a/src/mcp/tests.rs +++ b/src/mcp/tests.rs @@ -39,7 +39,7 @@ async fn test_tools_list() { let (mut ctx, _tmp) = test_ctx(); let resp = server.handle_request(req, &mut ctx).await.unwrap(); let tools = resp.get("result").unwrap().get("tools").unwrap().as_array().unwrap(); - assert_eq!(tools.len(), 52); + assert_eq!(tools.len(), 57); let names: Vec<&str> = tools.iter().map(|t| t.get("name").unwrap().as_str().unwrap()).collect(); assert!(names.contains(&"devkit_session_save")); assert!(names.contains(&"devkit_session_list")); diff --git a/src/mcp/tools/mod.rs b/src/mcp/tools/mod.rs index c3b0de8..9f5ccc5 100644 --- a/src/mcp/tools/mod.rs +++ b/src/mcp/tools/mod.rs @@ -55,5 +55,10 @@ mod tests { let _ = super::session::DevkitSessionSaveTool; let _ = super::session::DevkitSessionListTool; let _ = super::session::DevkitSessionResumeTool; + let _ = super::session::DevkitSessionAttachTool; + let _ = super::session::DevkitSessionDetachTool; + let _ = super::session::DevkitSessionActivateTool; + let _ = super::session::DevkitSessionSearchTool; + let _ = super::session::DevkitSessionCaptureTool; } } diff --git a/src/mcp/tools/session.rs b/src/mcp/tools/session.rs index 2407bb7..3638cb2 100644 --- a/src/mcp/tools/session.rs +++ b/src/mcp/tools/session.rs @@ -236,6 +236,19 @@ Use this when the user wants to: } else { vec![] }; + + let linked = + crate::registry::agent_context::list_linked_entities(&conn, context_id)?; + let linked_json: Vec = linked + .into_iter() + .map(|(eid, ltype, _cat)| { + json!({ + "entity_id": eid, + "link_type": ltype, + }) + }) + .collect(); + Ok(json!({ "success": true, "context": { @@ -248,6 +261,8 @@ Use this when the user wants to: }, "memories": memory_json, "memory_count": memory_json.len(), + "linked_entities": linked_json, + "linked_count": linked_json.len(), })) } None => Ok(json!({ @@ -258,6 +273,324 @@ Use this when the user wants to: } } +#[derive(Clone)] +pub struct DevkitSessionAttachTool; + +impl McpTool for DevkitSessionAttachTool { + fn name(&self) -> &'static str { + "devkit_session_attach" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Attach an entity (repo, vault note, skill, etc.) to an agent session. + +Use this when the user wants to: +- Link a repository to a project session +- Associate a skill or vault note with the current context +- Build a project workspace by connecting relevant resources"#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { "type": "string", "description": "Session ID" }, + "entity_id": { "type": "string", "description": "Entity ID (repo_id, vault path, skill_id, etc.)" }, + "link_type": { + "type": "string", + "enum": ["linked_repo", "linked_vault", "linked_skill", "linked_paper", "linked"], + "default": "linked", + "description": "Type of relationship" + } + }, + "required": ["context_id", "entity_id"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let context_id = args.get("context_id").and_then(|v| v.as_str()).unwrap_or(""); + let entity_id = args.get("entity_id").and_then(|v| v.as_str()).unwrap_or(""); + let link_type = args.get("link_type").and_then(|v| v.as_str()).unwrap_or("linked"); + if context_id.is_empty() { + anyhow::bail!("Missing required argument: context_id"); + } + if entity_id.is_empty() { + anyhow::bail!("Missing required argument: entity_id"); + } + + let mut conn = ctx.conn_mut()?; + crate::registry::agent_context::attach_entity(&mut conn, context_id, entity_id, link_type)?; + Ok(json!({ + "success": true, + "context_id": context_id, + "entity_id": entity_id, + "link_type": link_type, + })) + } +} + +#[derive(Clone)] +pub struct DevkitSessionDetachTool; + +impl McpTool for DevkitSessionDetachTool { + fn name(&self) -> &'static str { + "devkit_session_detach" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Detach an entity from an agent session. + +Use this when the user wants to: +- Remove a stale repository link +- Unlink a skill that is no longer relevant to the project"#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { "type": "string", "description": "Session ID" }, + "entity_id": { "type": "string", "description": "Entity ID to remove" }, + "link_type": { + "type": "string", + "description": "Specific link type to remove. Omit to remove all links to this entity." + } + }, + "required": ["context_id", "entity_id"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let context_id = args.get("context_id").and_then(|v| v.as_str()).unwrap_or(""); + let entity_id = args.get("entity_id").and_then(|v| v.as_str()).unwrap_or(""); + let link_type = args.get("link_type").and_then(|v| v.as_str()); + if context_id.is_empty() { + anyhow::bail!("Missing required argument: context_id"); + } + if entity_id.is_empty() { + anyhow::bail!("Missing required argument: entity_id"); + } + + let mut conn = ctx.conn_mut()?; + let removed = crate::registry::agent_context::detach_entity( + &mut conn, context_id, entity_id, link_type, + )?; + Ok(json!({ + "success": true, + "removed": removed, + "context_id": context_id, + "entity_id": entity_id, + })) + } +} + +#[derive(Clone)] +pub struct DevkitSessionActivateTool; + +impl McpTool for DevkitSessionActivateTool { + fn name(&self) -> &'static str { + "devkit_session_activate" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Activate a session so that subsequent skill executions automatically receive its memories and linked entities. + +Use this when the user wants to: +- Set a default project context for the current workspace +- Make all future skill runs context-aware without manual memory passing +- Switch between projects"#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { "type": "string", "description": "Session ID to activate" } + }, + "required": ["context_id"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + _ctx: &mut AppContext, + ) -> anyhow::Result { + let context_id = args.get("context_id").and_then(|v| v.as_str()).unwrap_or(""); + if context_id.is_empty() { + anyhow::bail!("Missing required argument: context_id"); + } + + let state_file = + crate::registry::WorkspaceRegistry::workspace_dir()?.join(".active_context"); + std::fs::write(&state_file, context_id)?; + + Ok(json!({ + "success": true, + "context_id": context_id, + "state_file": state_file.to_string_lossy().to_string(), + "tip": format!("Set DEVBASE_ACTIVE_CONTEXT={} in your environment to make this persistent across shell sessions.", context_id), + })) + } +} + +#[derive(Clone)] +pub struct DevkitSessionSearchTool; + +impl McpTool for DevkitSessionSearchTool { + fn name(&self) -> &'static str { + "devkit_session_search" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Search memories by keyword across all sessions or within a specific session. + +Use this when the user wants to: +- Find a past decision or constraint mentioned in memories +- Recall what was discussed in a previous project session +- Audit all sessions for a specific topic"#, + "inputSchema": { + "type": "object", + "properties": { + "query": { "type": "string", "description": "Keyword to search for" }, + "context_id": { + "type": "string", + "description": "Restrict search to a specific session. Omit for global search." + }, + "limit": { + "type": "integer", + "description": "Maximum results", + "default": 20 + } + }, + "required": ["query"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let query = args.get("query").and_then(|v| v.as_str()).unwrap_or(""); + let context_id = args.get("context_id").and_then(|v| v.as_str()); + let limit = args.get("limit").and_then(|v| v.as_u64()).unwrap_or(20) as usize; + if query.is_empty() { + anyhow::bail!("Missing required argument: query"); + } + + let conn = ctx.conn()?; + let memories = + crate::registry::agent_context::search_memories(&conn, context_id, query, limit)?; + let results: Vec = memories + .into_iter() + .map(|m| { + json!({ + "id": m.id, + "context_id": m.context_id, + "type": m.memory_type, + "content": m.content, + "created_at": m.created_at.to_rfc3339(), + }) + }) + .collect(); + + Ok(json!({ + "success": true, + "query": query, + "count": results.len(), + "memories": results, + })) + } +} + +#[derive(Clone)] +pub struct DevkitSessionCaptureTool; + +impl McpTool for DevkitSessionCaptureTool { + fn name(&self) -> &'static str { + "devkit_session_capture" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"Capture a decision, constraint, or observation into the active session's memory. + +Use this when the AI (or user) wants to: +- Record an architectural decision made during the conversation +- Save a constraint discovered while debugging +- Checkpoint a key insight before moving to another topic + +This is a lightweight append-only operation. No validation is performed on content."#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { + "type": "string", + "description": "Session ID. Omit to use the currently activated session (via devkit_session_activate)." + }, + "type": { + "type": "string", + "enum": ["decision", "constraint", "note", "discovery", "error", "action"], + "default": "note", + "description": "Memory classification" + }, + "content": { "type": "string", "description": "Memory content" } + }, + "required": ["content"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let content = args.get("content").and_then(|v| v.as_str()).unwrap_or(""); + if content.is_empty() { + anyhow::bail!("Missing required argument: content"); + } + let memory_type = args.get("type").and_then(|v| v.as_str()).unwrap_or("note"); + + let context_id = match args.get("context_id").and_then(|v| v.as_str()) { + Some(cid) if !cid.is_empty() => cid.to_string(), + _ => { + // Fallback: read activated session from state file + let state_file = + crate::registry::WorkspaceRegistry::workspace_dir()?.join(".active_context"); + std::fs::read_to_string(state_file) + .ok() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .ok_or_else(|| anyhow::anyhow!("No active session. Use context_id argument or devkit_session_activate first."))? + } + }; + + let mut conn = ctx.conn_mut()?; + let id = crate::registry::agent_context::insert_memory( + &mut conn, + &context_id, + memory_type, + content, + )?; + + Ok(json!({ + "success": true, + "memory_id": id, + "context_id": context_id, + "type": memory_type, + })) + } +} + #[cfg(test)] mod tests { use super::*; @@ -268,6 +601,11 @@ mod tests { assert_eq!(DevkitSessionSaveTool.name(), "devkit_session_save"); assert_eq!(DevkitSessionListTool.name(), "devkit_session_list"); assert_eq!(DevkitSessionResumeTool.name(), "devkit_session_resume"); + assert_eq!(DevkitSessionAttachTool.name(), "devkit_session_attach"); + assert_eq!(DevkitSessionDetachTool.name(), "devkit_session_detach"); + assert_eq!(DevkitSessionActivateTool.name(), "devkit_session_activate"); + assert_eq!(DevkitSessionSearchTool.name(), "devkit_session_search"); + assert_eq!(DevkitSessionCaptureTool.name(), "devkit_session_capture"); } #[test] diff --git a/src/registry/agent_context.rs b/src/registry/agent_context.rs index 2673a6b..2a716df 100644 --- a/src/registry/agent_context.rs +++ b/src/registry/agent_context.rs @@ -53,6 +53,7 @@ pub fn upsert_context( rusqlite::params![id, name, intent, now], )?; tx.commit()?; + log_op(conn, "upsert_context", id, intent, "ok"); Ok(()) } @@ -147,6 +148,7 @@ pub fn archive_context(conn: &mut Connection, id: &str) -> anyhow::Result rusqlite::params![now, id], )?; tx.commit()?; + log_op(conn, "archive_context", id, None, "ok"); Ok(rows > 0) } @@ -155,6 +157,7 @@ pub fn delete_context(conn: &mut Connection, id: &str) -> anyhow::Result { let tx = conn.transaction()?; let rows = tx.execute("DELETE FROM agent_contexts WHERE id = ?1", [id])?; tx.commit()?; + log_op(conn, "delete_context", id, None, "ok"); Ok(rows > 0) } @@ -177,6 +180,7 @@ pub fn insert_memory( )?; let id = tx.last_insert_rowid(); tx.commit()?; + log_op(conn, "insert_memory", context_id, Some(content), "ok"); Ok(id) } @@ -207,6 +211,156 @@ pub fn list_memories(conn: &Connection, context_id: &str) -> anyhow::Result, _>>().map_err(Into::into) } +// --------------------------------------------------------------------------- +// Context → Entity Links +// --------------------------------------------------------------------------- + +/// Link an entity (repo, vault, skill, etc.) to a context. +pub fn attach_entity( + conn: &mut Connection, + context_id: &str, + entity_id: &str, + link_type: &str, +) -> anyhow::Result<()> { + let tx = conn.transaction()?; + tx.execute( + "INSERT OR REPLACE INTO context_entity_links (context_id, entity_id, link_type, created_at) + VALUES (?1, ?2, ?3, ?4)", + rusqlite::params![context_id, entity_id, link_type, Utc::now().to_rfc3339()], + )?; + tx.commit()?; + log_op(conn, "attach_entity", context_id, Some(entity_id), "ok"); + Ok(()) +} + +/// Remove a link between a context and an entity. +pub fn detach_entity( + conn: &mut Connection, + context_id: &str, + entity_id: &str, + link_type: Option<&str>, +) -> anyhow::Result { + let tx = conn.transaction()?; + let rows = match link_type { + Some(lt) => tx.execute( + "DELETE FROM context_entity_links + WHERE context_id = ?1 AND entity_id = ?2 AND link_type = ?3", + rusqlite::params![context_id, entity_id, lt], + )?, + None => tx.execute( + "DELETE FROM context_entity_links + WHERE context_id = ?1 AND entity_id = ?2", + rusqlite::params![context_id, entity_id], + )?, + }; + tx.commit()?; + log_op(conn, "detach_entity", context_id, Some(entity_id), "ok"); + Ok(rows > 0) +} + +/// List all entities linked to a context. +pub fn list_linked_entities( + conn: &Connection, + context_id: &str, +) -> anyhow::Result> { + let mut stmt = conn.prepare( + "SELECT entity_id, link_type, created_at + FROM context_entity_links + WHERE context_id = ?1 + ORDER BY created_at DESC", + )?; + let rows = stmt.query_map([context_id], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?, row.get::<_, String>(2)?)) + })?; + rows.collect::, _>>().map_err(Into::into) +} + +/// List all contexts linked to an entity. +pub fn list_linking_contexts( + conn: &Connection, + entity_id: &str, +) -> anyhow::Result> { + let mut stmt = conn.prepare( + "SELECT context_id, link_type, created_at + FROM context_entity_links + WHERE entity_id = ?1 + ORDER BY created_at DESC", + )?; + let rows = stmt.query_map([entity_id], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?, row.get::<_, String>(2)?)) + })?; + rows.collect::, _>>().map_err(Into::into) +} + +/// Search memories by keyword (LIKE query) within a context or globally. +pub fn search_memories( + conn: &Connection, + context_id: Option<&str>, + query: &str, + limit: usize, +) -> anyhow::Result> { + let pattern = format!("%{}%", query.replace('%', "\\%").replace('_', "\\_")); + let row_mapper = |row: &rusqlite::Row| { + let created_at = parse_datetime(row.get(4)?).map_err(|e| { + rusqlite::Error::FromSqlConversionFailure( + 4, + rusqlite::types::Type::Text, + Box::new(std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())), + ) + })?; + Ok(AgentMemory { + id: row.get(0)?, + context_id: row.get(1)?, + memory_type: row.get(2)?, + content: row.get(3)?, + created_at, + }) + }; + if let Some(cid) = context_id { + let mut stmt = conn.prepare( + "SELECT id, context_id, memory_type, content, created_at + FROM agent_memories + WHERE context_id = ?1 AND content LIKE ?2 + ORDER BY created_at DESC + LIMIT ?3", + )?; + let rows = stmt.query_map(rusqlite::params![cid, &pattern, limit as i64], row_mapper)?; + return rows.collect::, _>>().map_err(Into::into); + } + let mut stmt = conn.prepare( + "SELECT id, context_id, memory_type, content, created_at + FROM agent_memories + WHERE content LIKE ?1 + ORDER BY created_at DESC + LIMIT ?2", + )?; + let rows = stmt.query_map(rusqlite::params![&pattern, limit as i64], row_mapper)?; + rows.collect::, _>>().map_err(Into::into) +} + +/// Write an agent-context operation to the oplog for audit/compensation. +fn log_op( + conn: &rusqlite::Connection, + _operation: &str, + context_id: &str, + details: Option<&str>, + status: &str, +) { + let _ = crate::registry::workspace::save_oplog( + conn, + &crate::registry::OplogEntry { + id: None, + event_type: crate::registry::OplogEventType::AgentContext, + repo_id: Some(context_id.to_string()), + details: details.map(|s| s.to_string()), + status: status.to_string(), + timestamp: chrono::Utc::now(), + duration_ms: None, + event_version: 1, + }, + ); +} + // --------------------------------------------------------------------------- // Helpers // --------------------------------------------------------------------------- @@ -304,4 +458,28 @@ mod tests { let mems = list_memories(&conn, "ctx-cascade").unwrap(); assert!(mems.is_empty()); } + + #[test] + fn test_entity_links() { + let mut conn = WorkspaceRegistry::init_in_memory().unwrap(); + upsert_context(&mut conn, "ctx-links", "Links", None).unwrap(); + + attach_entity(&mut conn, "ctx-links", "repo-1", "linked_repo").unwrap(); + attach_entity(&mut conn, "ctx-links", "skill-1", "linked_skill").unwrap(); + attach_entity(&mut conn, "ctx-links", "repo-1", "linked_repo").unwrap(); // idempotent + + let linked = list_linked_entities(&conn, "ctx-links").unwrap(); + assert_eq!(linked.len(), 2); + + let contexts = list_linking_contexts(&conn, "repo-1").unwrap(); + assert_eq!(contexts.len(), 1); + assert_eq!(contexts[0].0, "ctx-links"); + + assert!(detach_entity(&mut conn, "ctx-links", "repo-1", Some("linked_repo")).unwrap()); + let linked2 = list_linked_entities(&conn, "ctx-links").unwrap(); + assert_eq!(linked2.len(), 1); + + assert!(detach_entity(&mut conn, "ctx-links", "skill-1", None).unwrap()); + assert!(list_linked_entities(&conn, "ctx-links").unwrap().is_empty()); + } } diff --git a/src/registry/migrate.rs b/src/registry/migrate.rs index efb85e3..f22b5d6 100644 --- a/src/registry/migrate.rs +++ b/src/registry/migrate.rs @@ -4,7 +4,7 @@ use super::*; use crate::storage::StorageBackend; use std::path::PathBuf; -pub const CURRENT_SCHEMA_VERSION: i32 = 31; +pub const CURRENT_SCHEMA_VERSION: i32 = 32; impl WorkspaceRegistry { pub fn db_path() -> anyhow::Result { diff --git a/src/registry/migrations/mod.rs b/src/registry/migrations/mod.rs index aafd75e..450949a 100644 --- a/src/registry/migrations/mod.rs +++ b/src/registry/migrations/mod.rs @@ -33,6 +33,7 @@ pub mod v28_embedding_precision; pub mod v29_compensation_log; pub mod v30_code_symbol_attributes; pub mod v31_agent_contexts; +pub mod v32_context_links; pub fn run_all(conn: &mut Connection) -> anyhow::Result<()> { let user_version: i32 = conn.query_row("PRAGMA user_version", [], |row| row.get(0))?; @@ -130,6 +131,9 @@ pub fn run_all(conn: &mut Connection) -> anyhow::Result<()> { if user_version < 31 { v31_agent_contexts::run(conn)?; } + if user_version < 32 { + v32_context_links::run(conn)?; + } Ok(()) } diff --git a/src/registry/migrations/v32_context_links.rs b/src/registry/migrations/v32_context_links.rs new file mode 100644 index 0000000..e939a68 --- /dev/null +++ b/src/registry/migrations/v32_context_links.rs @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2026 juice094 +use rusqlite::Connection; + +pub fn run(conn: &Connection) -> anyhow::Result<()> { + conn.execute( + "CREATE TABLE IF NOT EXISTS context_entity_links ( + context_id TEXT NOT NULL, + entity_id TEXT NOT NULL, + link_type TEXT NOT NULL DEFAULT 'linked', + created_at TEXT NOT NULL, + PRIMARY KEY (context_id, entity_id, link_type) + )", + [], + )?; + conn.execute( + "CREATE INDEX IF NOT EXISTS idx_context_links_entity ON context_entity_links(entity_id)", + [], + )?; + conn.execute("PRAGMA user_version = 32", [])?; + Ok(()) +} diff --git a/src/registry/test_helpers.rs b/src/registry/test_helpers.rs index a23584f..b20d239 100644 --- a/src/registry/test_helpers.rs +++ b/src/registry/test_helpers.rs @@ -381,6 +381,15 @@ CREATE TABLE IF NOT EXISTS agent_memories ( FOREIGN KEY (context_id) REFERENCES agent_contexts(id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS idx_agent_memories_context ON agent_memories(context_id); + +CREATE TABLE IF NOT EXISTS context_entity_links ( + context_id TEXT NOT NULL, + entity_id TEXT NOT NULL, + link_type TEXT NOT NULL DEFAULT 'linked', + created_at TEXT NOT NULL, + PRIMARY KEY (context_id, entity_id, link_type) +); +CREATE INDEX IF NOT EXISTS idx_context_links_entity ON context_entity_links(entity_id); "#; #[cfg(test)] diff --git a/src/skill_runtime/executor.rs b/src/skill_runtime/executor.rs index 68b6fba..b20af52 100644 --- a/src/skill_runtime/executor.rs +++ b/src/skill_runtime/executor.rs @@ -65,6 +65,45 @@ pub fn run_skill( cmd.env("DEVBASE_SKILL_ID", &skill.id); cmd.env("DEVBASE_HOME", devbase_home()?); + // P2-B: Inject active session context memories if available + if let Some(ctx_id) = resolve_active_context() { + if let Ok(memories) = crate::registry::agent_context::list_memories(conn, &ctx_id) + && !memories.is_empty() + { + let mem_json: Vec = memories + .iter() + .map(|m| { + serde_json::json!({ + "type": m.memory_type, + "content": m.content, + }) + }) + .collect(); + cmd.env("DEVBASE_ACTIVE_CONTEXT", &ctx_id); + cmd.env( + "DEVBASE_CONTEXT_MEMORIES", + serde_json::to_string(&mem_json).unwrap_or_default(), + ); + } + if let Ok(linked) = crate::registry::agent_context::list_linked_entities(conn, &ctx_id) + && !linked.is_empty() + { + let links_json: Vec = linked + .iter() + .map(|(eid, ltype, _)| { + serde_json::json!({ + "entity_id": eid, + "link_type": ltype, + }) + }) + .collect(); + cmd.env( + "DEVBASE_CONTEXT_LINKS", + serde_json::to_string(&links_json).unwrap_or_default(), + ); + } + } + // Build JSON input from key=value args and pass via stdin let mut json_args = serde_json::Map::new(); for arg in args { @@ -215,6 +254,22 @@ pub(crate) fn check_hard_vetoes_for_skill( )) } +/// Resolve the active agent context ID from environment or state file. +fn resolve_active_context() -> Option { + if let Ok(ctx) = std::env::var("DEVBASE_ACTIVE_CONTEXT") + && !ctx.is_empty() + { + return Some(ctx); + } + let state_file = crate::registry::WorkspaceRegistry::workspace_dir() + .ok()? + .join(".active_context"); + std::fs::read_to_string(state_file) + .ok() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) +} + fn resolve_interpreter(path: &std::path::Path) -> (Option, String) { let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); let path_str = path.to_string_lossy().to_string(); From 62f2ceea1c9ea0d814ca3ac8a6d97c10a3a51d96 Mon Sep 17 00:00:00 2001 From: juice094 <160722440+juice094@users.noreply.github.com> Date: Tue, 12 May 2026 21:45:48 +0800 Subject: [PATCH 17/17] feat(agent-contexts): v0.16.1 Workflow-Session binding MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Schema v33: workflow_executions ADD COLUMN context_id (v33_workflow_context.rs) - Extract resolve_active_context() to agent_context module for reuse - workflow::state::create_execution now accepts context_id parameter - workflow executor and CLI auto-bind active session to executions - Registry CRUD: list_executions_by_context for session workflow audit - MCP tool: devkit_session_workflows — query workflow history by session - AGENTS.md: 57 → 58 tools, Schema 32 → 33 - Quality: clippy 0 warnings, fmt clean, 442 passed / 0 failed --- AGENTS.md | 2 +- src/commands/workflow.rs | 2 + src/mcp/mod.rs | 6 ++ src/mcp/tests.rs | 2 +- src/mcp/tools/mod.rs | 1 + src/mcp/tools/session.rs | 65 +++++++++++++++++++ src/registry/agent_context.rs | 16 +++++ src/registry/migrate.rs | 2 +- src/registry/migrations/mod.rs | 4 ++ .../migrations/v33_workflow_context.rs | 20 ++++++ src/registry/test_helpers.rs | 3 +- src/skill_runtime/executor.rs | 18 +---- src/workflow/mod.rs | 4 +- src/workflow/state.rs | 38 +++++++++-- 14 files changed, 156 insertions(+), 27 deletions(-) create mode 100644 src/registry/migrations/v33_workflow_context.rs diff --git a/AGENTS.md b/AGENTS.md index 5486e56..2c29705 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -22,7 +22,7 @@ Skill Runtime 全生命周期已落地(含依赖管理 Schema v15),Schema - **Workspace**:`%LOCALAPPDATA%\devbase\workspace/` —— 文件系统 = source of truth - `vault/` —— PARA 结构:00-Inbox, 01-Projects, 02-Areas, 03-Resources, 04-Archives, 99-Meta - `assets/` —— 二进制资源 -- **MCP Server**:stdio only,**57 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 8 个 Agent Context tools + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` +- **MCP Server**:stdio only,**58 个 tools**(含 5 个 vault tools + 8 个代码分析工具 + 4 个 embedding/搜索工具 + 4 个 Skill Runtime tools + 3 个 Workflow/评分 tools + 1 个报告工具 + 1 个 arXiv 工具 + 2 个 KnownLimit tools + 3 个 Relation tools + 2 个 Agent 状态工具 + 9 个 Agent Context tools + 1 个 streaming index 工具 + 1 个 oplog 工具);配置见 `mcp.json` - **Kimi CLI 集成**:MCP server 已通过 `kimi mcp add` 注册,端到端验证通过(`kimi --print` 成功调用 `devkit_health`);项目级 skill 位于 `.kimi/skills/devbase-project/SKILL.md` - **统一节点模型**:`core::node::{Node, NodeType, Edge}` —— GitRepo / VaultNote / Asset / ExternalLink - **当前测试**:490+ workspace passed / 0 failed / 4 ignored(主 crate 390 + symbol-links 4 + sync-protocol 12 + core-types 3 + syncthing-client 2 + vault-frontmatter 5 + vault-wikilink 5 + workflow-interpolate 9 + workflow-model 2 + registry-health 3 + registry-metrics 4 + registry-workspace 5 + embedding 5 + skill-runtime-types 7 + skill-runtime-parser 3 + 其他 crates ~30);11/11 passed(integration `tests/cli.rs`) diff --git a/src/commands/workflow.rs b/src/commands/workflow.rs index ee2a8c3..030f70a 100644 --- a/src/commands/workflow.rs +++ b/src/commands/workflow.rs @@ -82,10 +82,12 @@ pub fn run_workflow( )); } } + let active_ctx = crate::registry::agent_context::resolve_active_context(); let exec_id = crate::workflow::create_execution( &conn, &workflow_id, &serde_json::to_string(&input_map)?, + active_ctx.as_deref(), )?; crate::workflow::update_execution( &conn, diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs index b49b91e..3ebfbf6 100644 --- a/src/mcp/mod.rs +++ b/src/mcp/mod.rs @@ -108,6 +108,7 @@ pub enum McpToolEnum { SessionActivate(DevkitSessionActivateTool), SessionSearch(DevkitSessionSearchTool), SessionCapture(DevkitSessionCaptureTool), + SessionWorkflows(DevkitSessionWorkflowsTool), WorkflowList(DevkitWorkflowListTool), WorkflowRun(DevkitWorkflowRunTool), WorkflowStatus(DevkitWorkflowStatusTool), @@ -193,6 +194,7 @@ impl McpToolEnum { McpToolEnum::SessionActivate(_) => ToolTier::Beta, McpToolEnum::SessionSearch(_) => ToolTier::Beta, McpToolEnum::SessionCapture(_) => ToolTier::Beta, + McpToolEnum::SessionWorkflows(_) => ToolTier::Beta, McpToolEnum::WorkflowList(_) => ToolTier::Beta, McpToolEnum::WorkflowRun(_) => ToolTier::Beta, McpToolEnum::WorkflowStatus(_) => ToolTier::Beta, @@ -257,6 +259,7 @@ impl McpTool for McpToolEnum { McpToolEnum::SessionActivate(t) => t.name(), McpToolEnum::SessionSearch(t) => t.name(), McpToolEnum::SessionCapture(t) => t.name(), + McpToolEnum::SessionWorkflows(t) => t.name(), McpToolEnum::WorkflowList(t) => t.name(), McpToolEnum::WorkflowRun(t) => t.name(), McpToolEnum::WorkflowStatus(t) => t.name(), @@ -319,6 +322,7 @@ impl McpTool for McpToolEnum { McpToolEnum::SessionActivate(t) => t.schema(), McpToolEnum::SessionSearch(t) => t.schema(), McpToolEnum::SessionCapture(t) => t.schema(), + McpToolEnum::SessionWorkflows(t) => t.schema(), McpToolEnum::WorkflowList(t) => t.schema(), McpToolEnum::WorkflowRun(t) => t.schema(), McpToolEnum::WorkflowStatus(t) => t.schema(), @@ -385,6 +389,7 @@ impl McpTool for McpToolEnum { McpToolEnum::SessionActivate(t) => t.invoke(args, ctx).await, McpToolEnum::SessionSearch(t) => t.invoke(args, ctx).await, McpToolEnum::SessionCapture(t) => t.invoke(args, ctx).await, + McpToolEnum::SessionWorkflows(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowList(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowRun(t) => t.invoke(args, ctx).await, McpToolEnum::WorkflowStatus(t) => t.invoke(args, ctx).await, @@ -641,6 +646,7 @@ pub fn build_server_with_tiers(tiers: Option<&HashSet>) -> McpServer { McpToolEnum::SessionActivate(DevkitSessionActivateTool), McpToolEnum::SessionSearch(DevkitSessionSearchTool), McpToolEnum::SessionCapture(DevkitSessionCaptureTool), + McpToolEnum::SessionWorkflows(DevkitSessionWorkflowsTool), McpToolEnum::WorkflowList(DevkitWorkflowListTool), McpToolEnum::WorkflowRun(DevkitWorkflowRunTool), McpToolEnum::WorkflowStatus(DevkitWorkflowStatusTool), diff --git a/src/mcp/tests.rs b/src/mcp/tests.rs index e925136..4fda4d9 100644 --- a/src/mcp/tests.rs +++ b/src/mcp/tests.rs @@ -39,7 +39,7 @@ async fn test_tools_list() { let (mut ctx, _tmp) = test_ctx(); let resp = server.handle_request(req, &mut ctx).await.unwrap(); let tools = resp.get("result").unwrap().get("tools").unwrap().as_array().unwrap(); - assert_eq!(tools.len(), 57); + assert_eq!(tools.len(), 58); let names: Vec<&str> = tools.iter().map(|t| t.get("name").unwrap().as_str().unwrap()).collect(); assert!(names.contains(&"devkit_session_save")); assert!(names.contains(&"devkit_session_list")); diff --git a/src/mcp/tools/mod.rs b/src/mcp/tools/mod.rs index 9f5ccc5..5cb265d 100644 --- a/src/mcp/tools/mod.rs +++ b/src/mcp/tools/mod.rs @@ -60,5 +60,6 @@ mod tests { let _ = super::session::DevkitSessionActivateTool; let _ = super::session::DevkitSessionSearchTool; let _ = super::session::DevkitSessionCaptureTool; + let _ = super::session::DevkitSessionWorkflowsTool; } } diff --git a/src/mcp/tools/session.rs b/src/mcp/tools/session.rs index 3638cb2..c598322 100644 --- a/src/mcp/tools/session.rs +++ b/src/mcp/tools/session.rs @@ -591,6 +591,70 @@ This is a lightweight append-only operation. No validation is performed on conte } } +#[derive(Clone)] +pub struct DevkitSessionWorkflowsTool; + +impl McpTool for DevkitSessionWorkflowsTool { + fn name(&self) -> &'static str { + "devkit_session_workflows" + } + + fn schema(&self) -> serde_json::Value { + json!({ + "description": r#"List workflow executions associated with an agent session. + +Use this when the user wants to: +- Review what automated workflows were run in a project context +- Audit the execution history of a session +- Check workflow status for a specific project"#, + "inputSchema": { + "type": "object", + "properties": { + "context_id": { "type": "string", "description": "Session ID" }, + "limit": { "type": "integer", "description": "Maximum results", "default": 20 } + }, + "required": ["context_id"] + } + }) + } + + async fn invoke( + &self, + args: serde_json::Value, + ctx: &mut AppContext, + ) -> anyhow::Result { + let context_id = args.get("context_id").and_then(|v| v.as_str()).unwrap_or(""); + let limit = args.get("limit").and_then(|v| v.as_i64()).unwrap_or(20); + if context_id.is_empty() { + anyhow::bail!("Missing required argument: context_id"); + } + + let conn = ctx.conn()?; + let executions = + crate::workflow::state::list_executions_by_context(&conn, context_id, limit)?; + let results: Vec = executions + .into_iter() + .map(|(id, wf_id, status, current_step, started_at, duration_ms)| { + json!({ + "execution_id": id, + "workflow_id": wf_id, + "status": status, + "current_step": current_step, + "started_at": started_at, + "duration_ms": duration_ms, + }) + }) + .collect(); + + Ok(json!({ + "success": true, + "context_id": context_id, + "count": results.len(), + "executions": results, + })) + } +} + #[cfg(test)] mod tests { use super::*; @@ -606,6 +670,7 @@ mod tests { assert_eq!(DevkitSessionActivateTool.name(), "devkit_session_activate"); assert_eq!(DevkitSessionSearchTool.name(), "devkit_session_search"); assert_eq!(DevkitSessionCaptureTool.name(), "devkit_session_capture"); + assert_eq!(DevkitSessionWorkflowsTool.name(), "devkit_session_workflows"); } #[test] diff --git a/src/registry/agent_context.rs b/src/registry/agent_context.rs index 2a716df..9978c9a 100644 --- a/src/registry/agent_context.rs +++ b/src/registry/agent_context.rs @@ -361,6 +361,22 @@ fn log_op( ); } +/// Resolve the active agent context ID from environment or workspace state file. +pub fn resolve_active_context() -> Option { + if let Ok(ctx) = std::env::var("DEVBASE_ACTIVE_CONTEXT") + && !ctx.is_empty() + { + return Some(ctx); + } + let state_file = crate::registry::WorkspaceRegistry::workspace_dir() + .ok()? + .join(".active_context"); + std::fs::read_to_string(state_file) + .ok() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) +} + // --------------------------------------------------------------------------- // Helpers // --------------------------------------------------------------------------- diff --git a/src/registry/migrate.rs b/src/registry/migrate.rs index f22b5d6..8dc5b8b 100644 --- a/src/registry/migrate.rs +++ b/src/registry/migrate.rs @@ -4,7 +4,7 @@ use super::*; use crate::storage::StorageBackend; use std::path::PathBuf; -pub const CURRENT_SCHEMA_VERSION: i32 = 32; +pub const CURRENT_SCHEMA_VERSION: i32 = 33; impl WorkspaceRegistry { pub fn db_path() -> anyhow::Result { diff --git a/src/registry/migrations/mod.rs b/src/registry/migrations/mod.rs index 450949a..903d985 100644 --- a/src/registry/migrations/mod.rs +++ b/src/registry/migrations/mod.rs @@ -34,6 +34,7 @@ pub mod v29_compensation_log; pub mod v30_code_symbol_attributes; pub mod v31_agent_contexts; pub mod v32_context_links; +pub mod v33_workflow_context; pub fn run_all(conn: &mut Connection) -> anyhow::Result<()> { let user_version: i32 = conn.query_row("PRAGMA user_version", [], |row| row.get(0))?; @@ -134,6 +135,9 @@ pub fn run_all(conn: &mut Connection) -> anyhow::Result<()> { if user_version < 32 { v32_context_links::run(conn)?; } + if user_version < 33 { + v33_workflow_context::run(conn)?; + } Ok(()) } diff --git a/src/registry/migrations/v33_workflow_context.rs b/src/registry/migrations/v33_workflow_context.rs new file mode 100644 index 0000000..3f4f8d5 --- /dev/null +++ b/src/registry/migrations/v33_workflow_context.rs @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2026 juice094 +use rusqlite::Connection; + +pub fn run(conn: &Connection) -> anyhow::Result<()> { + let cols: Vec = { + let mut stmt = conn.prepare("PRAGMA table_info(workflow_executions)")?; + let rows = stmt.query_map([], |row| row.get::<_, String>(1))?; + rows.filter_map(Result::ok).collect() + }; + if !cols.iter().any(|c| c == "context_id") { + conn.execute("ALTER TABLE workflow_executions ADD COLUMN context_id TEXT", [])?; + } + conn.execute( + "CREATE INDEX IF NOT EXISTS idx_workflow_execs_context ON workflow_executions(context_id)", + [], + )?; + conn.execute("PRAGMA user_version = 33", [])?; + Ok(()) +} diff --git a/src/registry/test_helpers.rs b/src/registry/test_helpers.rs index b20d239..9a68d4f 100644 --- a/src/registry/test_helpers.rs +++ b/src/registry/test_helpers.rs @@ -275,7 +275,8 @@ CREATE TABLE IF NOT EXISTS workflow_executions ( current_step TEXT, started_at TEXT NOT NULL, finished_at TEXT, - duration_ms INTEGER + duration_ms INTEGER, + context_id TEXT ); -- v18: Known Limits (L3 risk layer) diff --git a/src/skill_runtime/executor.rs b/src/skill_runtime/executor.rs index b20af52..47af436 100644 --- a/src/skill_runtime/executor.rs +++ b/src/skill_runtime/executor.rs @@ -66,7 +66,7 @@ pub fn run_skill( cmd.env("DEVBASE_HOME", devbase_home()?); // P2-B: Inject active session context memories if available - if let Some(ctx_id) = resolve_active_context() { + if let Some(ctx_id) = crate::registry::agent_context::resolve_active_context() { if let Ok(memories) = crate::registry::agent_context::list_memories(conn, &ctx_id) && !memories.is_empty() { @@ -254,22 +254,6 @@ pub(crate) fn check_hard_vetoes_for_skill( )) } -/// Resolve the active agent context ID from environment or state file. -fn resolve_active_context() -> Option { - if let Ok(ctx) = std::env::var("DEVBASE_ACTIVE_CONTEXT") - && !ctx.is_empty() - { - return Some(ctx); - } - let state_file = crate::registry::WorkspaceRegistry::workspace_dir() - .ok()? - .join(".active_context"); - std::fs::read_to_string(state_file) - .ok() - .map(|s| s.trim().to_string()) - .filter(|s| !s.is_empty()) -} - fn resolve_interpreter(path: &std::path::Path) -> (Option, String) { let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); let path_str = path.to_string_lossy().to_string(); diff --git a/src/workflow/mod.rs b/src/workflow/mod.rs index be45150..862f32b 100644 --- a/src/workflow/mod.rs +++ b/src/workflow/mod.rs @@ -84,7 +84,9 @@ impl crate::clients::WorkflowClient for AppContext { }; let inputs_json = inputs.to_string(); - let exec_id = state::create_execution(&conn, workflow_id, &inputs_json)?; + let active_ctx = crate::registry::agent_context::resolve_active_context(); + let exec_id = + state::create_execution(&conn, workflow_id, &inputs_json, active_ctx.as_deref())?; state::update_execution(&conn, exec_id, &model::ExecutionStatus::Running, None, None)?; let pool = self.pool(); diff --git a/src/workflow/state.rs b/src/workflow/state.rs index 7c8b150..e50bb26 100644 --- a/src/workflow/state.rs +++ b/src/workflow/state.rs @@ -61,16 +61,44 @@ pub fn create_execution( conn: &Connection, workflow_id: &str, inputs_json: &str, + context_id: Option<&str>, ) -> anyhow::Result { let now = chrono::Utc::now().to_rfc3339(); conn.execute( - "INSERT INTO workflow_executions (workflow_id, inputs_json, status, current_step, started_at) - VALUES (?1, ?2, 'Pending', NULL, ?3)", - params![workflow_id, inputs_json, now], + "INSERT INTO workflow_executions (workflow_id, inputs_json, status, current_step, started_at, context_id) + VALUES (?1, ?2, 'Pending', NULL, ?3, ?4)", + params![workflow_id, inputs_json, now, context_id], )?; Ok(conn.last_insert_rowid()) } +/// List workflow executions bound to a session context. +#[allow(clippy::type_complexity)] +pub fn list_executions_by_context( + conn: &Connection, + context_id: &str, + limit: i64, +) -> anyhow::Result, String, Option)>> { + let mut stmt = conn.prepare( + "SELECT id, workflow_id, status, current_step, started_at, duration_ms + FROM workflow_executions + WHERE context_id = ?1 + ORDER BY started_at DESC + LIMIT ?2", + )?; + let rows = stmt.query_map(params![context_id, limit], |row| { + Ok(( + row.get::<_, i64>(0)?, + row.get::<_, String>(1)?, + row.get::<_, String>(2)?, + row.get::<_, Option>(3)?, + row.get::<_, String>(4)?, + row.get::<_, Option>(5)?, + )) + })?; + rows.collect::, _>>().map_err(Into::into) +} + pub fn update_execution( conn: &Connection, exec_id: i64, @@ -162,7 +190,7 @@ mod tests { let conn = WorkspaceRegistry::init_in_memory().unwrap(); let wf = dummy_wf(); save_workflow(&conn, &wf).unwrap(); - let exec_id = create_execution(&conn, "test-wf", r#"{"repo_path":"/tmp"}"#).unwrap(); + let exec_id = create_execution(&conn, "test-wf", r#"{"repo_path":"/tmp"}"#, None).unwrap(); assert!(exec_id > 0); update_execution(&conn, exec_id, &ExecutionStatus::Running, Some("step1"), None).unwrap(); let exec = get_execution(&conn, exec_id).unwrap().unwrap(); @@ -204,7 +232,7 @@ mod tests { validate_workflow(&wf).unwrap(); save_workflow(&conn, &wf).unwrap(); - let exec_id = create_execution(&conn, "e2e-wf", "{}").unwrap(); + let exec_id = create_execution(&conn, "e2e-wf", "{}", None).unwrap(); update_execution(&conn, exec_id, &ExecutionStatus::Running, None, None).unwrap(); // Execution should fail because skill does not exist