diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3fd039ca..d71a83ab 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -60,4 +60,4 @@ jobs:
           cargo fmt --all -- --check
           npm run generate-types:check
           cargo test --workspace
-          cargo clippy --all --all-targets --all-features
+          cargo clippy --all --all-targets -- -D warnings
diff --git a/crates/db/src/models/project.rs b/crates/db/src/models/project.rs
index 1fc44e40..059fe05c 100644
--- a/crates/db/src/models/project.rs
+++ b/crates/db/src/models/project.rs
@@ -174,6 +174,7 @@ impl Project {
         .await
     }
 
+    #[allow(clippy::too_many_arguments)]
     pub async fn update(
         pool: &SqlitePool,
         id: Uuid,
diff --git a/crates/executors/src/actions/coding_agent_follow_up.rs b/crates/executors/src/actions/coding_agent_follow_up.rs
index 0a23f134..3211d9e8 100644
--- a/crates/executors/src/actions/coding_agent_follow_up.rs
+++ b/crates/executors/src/actions/coding_agent_follow_up.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 
 use async_trait::async_trait;
 use command_group::AsyncGroupChild;
@@ -30,7 +30,7 @@ impl CodingAgentFollowUpRequest {
 
 #[async_trait]
 impl Executable for CodingAgentFollowUpRequest {
-    async fn spawn(&self, current_dir: &PathBuf) -> Result<AsyncGroupChild, ExecutorError> {
+    async fn spawn(&self, current_dir: &Path) -> Result<AsyncGroupChild, ExecutorError> {
         let executor_profile_id = self.get_executor_profile_id();
         let agent = ExecutorConfigs::get_cached()
             .get_coding_agent(&executor_profile_id)
diff --git a/crates/executors/src/actions/coding_agent_initial.rs b/crates/executors/src/actions/coding_agent_initial.rs
index 068e74c2..6fafa888 100644
--- a/crates/executors/src/actions/coding_agent_initial.rs
+++ b/crates/executors/src/actions/coding_agent_initial.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 
 use async_trait::async_trait;
 use command_group::AsyncGroupChild;
@@ -22,7 +22,7 @@ pub struct CodingAgentInitialRequest {
 
 #[async_trait]
 impl Executable for CodingAgentInitialRequest {
-    async fn spawn(&self, current_dir: &PathBuf) -> Result<AsyncGroupChild, ExecutorError> {
+    async fn spawn(&self, current_dir: &Path) -> Result<AsyncGroupChild, ExecutorError> {
         let executor_profile_id = self.executor_profile_id.clone();
         let agent = ExecutorConfigs::get_cached()
             .get_coding_agent(&executor_profile_id)
diff --git a/crates/executors/src/actions/mod.rs b/crates/executors/src/actions/mod.rs
index c84042b4..7b51f625 100644
--- a/crates/executors/src/actions/mod.rs
+++ b/crates/executors/src/actions/mod.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 
 use async_trait::async_trait;
 use command_group::AsyncGroupChild;
@@ -41,20 +41,20 @@ impl ExecutorAction {
         &self.typ
     }
 
-    pub fn next_action(&self) -> Option<&Box<ExecutorAction>> {
-        self.next_action.as_ref()
+    pub fn next_action(&self) -> Option<&ExecutorAction> {
+        self.next_action.as_deref()
     }
 }
 
 #[async_trait]
 #[enum_dispatch(ExecutorActionType)]
 pub trait Executable {
-    async fn spawn(&self, current_dir: &PathBuf) -> Result<AsyncGroupChild, ExecutorError>;
+    async fn spawn(&self, current_dir: &Path) -> Result<AsyncGroupChild, ExecutorError>;
 }
 
 #[async_trait]
 impl Executable for ExecutorAction {
-    async fn spawn(&self, current_dir: &PathBuf) -> Result<AsyncGroupChild, ExecutorError> {
+    async fn spawn(&self, current_dir: &Path) -> Result<AsyncGroupChild, ExecutorError> {
        self.typ.spawn(current_dir).await
    }
}
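The driver for everything below is the CI change at the top: `-- -D warnings` turns every clippy (and rustc) warning into a build failure. The `next_action` change just above is the `clippy::borrowed_box` fix: `Option<&Box<ExecutorAction>>` leaks the `Box` to every caller, while `as_deref` flattens the owned `Option<Box<T>>` into `Option<&T>`. A minimal self-contained sketch of the pattern, using a simplified `Action` type rather than the real `ExecutorAction`:

```rust
struct Action {
    next: Option<Box<Action>>,
}

impl Action {
    // `as_deref` borrows through the Box: Option<Box<Action>> -> Option<&Action>.
    // Returning Option<&Box<Action>> would trip clippy::borrowed_box instead.
    fn next_action(&self) -> Option<&Action> {
        self.next.as_deref()
    }
}

fn main() {
    let chain = Action {
        next: Some(Box::new(Action { next: None })),
    };
    assert!(chain.next_action().is_some());
    assert!(chain.next_action().and_then(Action::next_action).is_none());
}
```

`Option::as_deref` works for any payload implementing `Deref`, which is also why it converts an `Option<String>` into an `Option<&str>`.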
diff --git a/crates/executors/src/actions/script.rs b/crates/executors/src/actions/script.rs
index 6c855860..fe64738d 100644
--- a/crates/executors/src/actions/script.rs
+++ b/crates/executors/src/actions/script.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::Path;
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -30,7 +30,7 @@ pub struct ScriptRequest {
 
 #[async_trait]
 impl Executable for ScriptRequest {
-    async fn spawn(&self, current_dir: &PathBuf) -> Result<AsyncGroupChild, ExecutorError> {
+    async fn spawn(&self, current_dir: &Path) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
         let mut command = Command::new(shell_cmd);
         command
diff --git a/crates/executors/src/executors/amp.rs b/crates/executors/src/executors/amp.rs
index 94c7db56..d9b14524 100644
--- a/crates/executors/src/executors/amp.rs
+++ b/crates/executors/src/executors/amp.rs
@@ -1,4 +1,4 @@
-use std::{path::PathBuf, process::Stdio, sync::Arc};
+use std::{path::Path, process::Stdio, sync::Arc};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -42,7 +42,7 @@ impl Amp {
 impl StandardCodingAgentExecutor for Amp {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -73,7 +73,7 @@ impl StandardCodingAgentExecutor for Amp {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -108,7 +108,7 @@ impl StandardCodingAgentExecutor for Amp {
         Ok(child)
     }
 
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &Path) {
        let entry_index_provider = EntryIndexProvider::start_from(&msg_store);

        // Process stdout logs (Amp's stream JSON output) using Claude's log processor
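From here on, most hunks repeat the same `clippy::ptr_arg` fix: a `&PathBuf` parameter promises nothing that `&Path` does not, and it forces callers to have an owned `PathBuf` on hand. A minimal sketch of the lint, not the project's actual helper:

```rust
use std::path::Path;

// clippy::ptr_arg: take the unsized view, since the function only reads.
fn working_dir_arg(current_dir: &Path) -> String {
    current_dir.to_string_lossy().into_owned()
}

fn main() {
    // A borrowed path is enough now; no PathBuf allocation is required.
    println!("{}", working_dir_arg(Path::new("/tmp/worktree")));
}
```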
diff --git a/crates/executors/src/executors/claude.rs b/crates/executors/src/executors/claude.rs
index 005fa928..a8e253e8 100644
--- a/crates/executors/src/executors/claude.rs
+++ b/crates/executors/src/executors/claude.rs
@@ -1,4 +1,4 @@
-use std::{path::PathBuf, process::Stdio, sync::Arc};
+use std::{path::Path, process::Stdio, sync::Arc};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -76,7 +76,7 @@ impl ClaudeCode {
 impl StandardCodingAgentExecutor for ClaudeCode {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -113,7 +113,7 @@ impl StandardCodingAgentExecutor for ClaudeCode {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -151,7 +151,7 @@ impl StandardCodingAgentExecutor for ClaudeCode {
         Ok(child)
     }
 
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &Path) {
         let entry_index_provider = EntryIndexProvider::start_from(&msg_store);
 
         // Process stdout logs (Claude's JSON output)
@@ -229,11 +229,11 @@ impl ClaudeLogProcessor {
     /// Process raw logs and convert them to normalized entries with patches
     pub fn process_logs(
         msg_store: Arc<MsgStore>,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         entry_index_provider: EntryIndexProvider,
         strategy: HistoryStrategy,
     ) {
-        let current_dir_clone = current_dir.clone();
+        let current_dir_clone = current_dir.to_owned();
         tokio::spawn(async move {
             let mut stream = msg_store.history_plus_stream();
             let mut buffer = String::new();
@@ -546,8 +546,8 @@ impl ClaudeLogProcessor {
                     }
                     _ => {
                         // Convert to normalized entries and create patches for other kinds
-                        for entry in processor
-                            .to_normalized_entries(&claude_json, &worktree_path)
+                        for entry in
+                            processor.normalize_entries(&claude_json, &worktree_path)
                         {
                             let patch_id = entry_index_provider.next();
                             let patch = ConversationPatch::add_normalized_entry(
@@ -611,7 +611,7 @@ impl ClaudeLogProcessor {
     }
 
     /// Convert Claude JSON to normalized entries
-    fn to_normalized_entries(
+    fn normalize_entries(
         &mut self,
         claude_json: &ClaudeJson,
         worktree_path: &str,
@@ -1353,13 +1353,13 @@ mod tests {
             Some("abc123".to_string())
         );
 
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 0);
 
         let assistant_json = r#" {"type":"assistant","message":{"type":"message","role":"assistant","model":"claude-sonnet-4-20250514","content":[{"type":"text","text":"Hi! I'm Claude Code."}]}}"#;
         let parsed: ClaudeJson = serde_json::from_str(assistant_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 2);
 
         assert!(matches!(
@@ -1377,7 +1377,7 @@ mod tests {
         let assistant_json = r#"{"type":"assistant","message":{"role":"assistant","content":[{"type":"text","text":"Hello world"}]},"session_id":"abc123"}"#;
         let parsed: ClaudeJson = serde_json::from_str(assistant_json).unwrap();
 
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 1);
         assert!(matches!(
             entries[0].entry_type,
@@ -1391,7 +1391,7 @@ mod tests {
         let result_json = r#"{"type":"result","subtype":"success","is_error":false,"duration_ms":6059,"result":"Final result"}"#;
         let parsed: ClaudeJson = serde_json::from_str(result_json).unwrap();
 
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 0); // Should be ignored like in old implementation
     }
 
@@ -1400,7 +1400,7 @@ mod tests {
         let thinking_json = r#"{"type":"assistant","message":{"role":"assistant","content":[{"type":"thinking","thinking":"Let me think about this..."}]}}"#;
         let parsed: ClaudeJson = serde_json::from_str(thinking_json).unwrap();
 
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 1);
         assert!(matches!(
             entries[0].entry_type,
@@ -1570,7 +1570,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(assistant_with_create).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         match &entries[0].entry_type {
             NormalizedEntryType::ToolUse { action_type, .. } => match action_type {
@@ -1591,7 +1591,7 @@ mod tests {
             }
         }"#;
         let parsed_edit: ClaudeJson = serde_json::from_str(assistant_with_edit).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed_edit, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed_edit, "/tmp/work");
        assert_eq!(entries.len(), 1);
        match &entries[0].entry_type {
            NormalizedEntryType::ToolUse { action_type, ..
} => match action_type {
@@ -1615,7 +1615,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(oracle_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         assert_eq!(entries[0].content, "Oracle: `Assess project status`");
 
@@ -1630,7 +1630,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(mermaid_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         assert_eq!(entries[0].content, "Mermaid diagram");
 
@@ -1645,7 +1645,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(csa_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         assert_eq!(entries[0].content, "Codebase search: `TODO markers`");
 
@@ -1660,7 +1660,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(undo_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         assert_eq!(entries[0].content, "Undo edit: `README.md`");
     }
@@ -1678,7 +1678,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(bash_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         // Content should display the command in backticks
         assert_eq!(entries[0].content, "`echo hello`");
@@ -1694,7 +1694,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(task_json).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         assert_eq!(entries[0].content, "Task: `Add header to README`");
     }
@@ -1716,7 +1716,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(with_desc).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
         assert_eq!(entries.len(), 1);
         assert_eq!(entries[0].content, "Task: `Primary description`");
 
@@ -1734,7 +1734,7 @@ mod tests {
             }
         }"#;
         let parsed: ClaudeJson = serde_json::from_str(no_desc).unwrap();
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "/tmp/work");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "/tmp/work");
        assert_eq!(entries.len(), 1);
        assert_eq!(entries[0].content, "Task: `Only prompt`");
    }
@@ -1751,7 +1751,7 @@ mod tests {
         );
 
         // ToolResult messages should be ignored (produce no entries) until proper support is added
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 0);
     }
 
@@ -1761,7 +1761,7 @@ mod tests {
         let parsed: ClaudeJson = serde_json::from_str(assistant_with_tool_result).unwrap();
 
         // ToolResult content items should be ignored (produce no entries) until proper support is added
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
         assert_eq!(entries.len(), 0);
     }
 
@@ -1770,7 +1770,7 @@ mod tests {
         let complex_assistant_json = r#"{"type":"assistant","message":{"role":"assistant","content":[{"type":"thinking","thinking":"I need to read the file first"},{"type":"text","text":"I'll help you with that"},{"type":"tool_result","tool_use_id":"tool_789","content":"Success","is_error":false}]}}"#;
         let parsed: ClaudeJson = serde_json::from_str(complex_assistant_json).unwrap();
 
-        let entries = ClaudeLogProcessor::new().to_normalized_entries(&parsed, "");
+        let entries = ClaudeLogProcessor::new().normalize_entries(&parsed, "");
        // Only thinking and text entries should be processed, tool_result ignored
        assert_eq!(entries.len(), 2);
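One detail worth noting in `process_logs` above: the signature borrows `&Path`, yet the body immediately calls `to_owned()`. That is deliberate. The spawned task must satisfy tokio's `'static` bound, so an owned `PathBuf` is moved in while callers still only lend a borrow. A runnable sketch of the shape, assuming tokio as a dependency (which these crates already use):

```rust
use std::path::Path;

// Borrow at the API boundary, own inside the 'static task.
fn process_logs(current_dir: &Path) -> tokio::task::JoinHandle<()> {
    let current_dir = current_dir.to_owned(); // &Path -> PathBuf
    tokio::spawn(async move {
        println!("normalizing logs under {}", current_dir.display());
    })
}

#[tokio::main]
async fn main() {
    process_logs(Path::new("/tmp/worktree")).await.unwrap();
}
```

`to_owned()` and `to_path_buf()` are equivalent here; the diff uses both spellings.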
diff --git a/crates/executors/src/executors/codex.rs b/crates/executors/src/executors/codex.rs
index b80300dd..38ee67d6 100644
--- a/crates/executors/src/executors/codex.rs
+++ b/crates/executors/src/executors/codex.rs
@@ -1,4 +1,8 @@
-use std::{path::PathBuf, process::Stdio, sync::Arc};
+use std::{
+    path::{Path, PathBuf},
+    process::Stdio,
+    sync::Arc,
+};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -155,7 +159,7 @@ impl Codex {
 impl StandardCodingAgentExecutor for Codex {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -188,7 +192,7 @@ impl StandardCodingAgentExecutor for Codex {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -229,14 +233,14 @@ impl StandardCodingAgentExecutor for Codex {
         Ok(child)
     }
 
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &Path) {
         let entry_index_provider = EntryIndexProvider::start_from(&msg_store);
 
         // Process stderr logs for session extraction only (errors come through JSONL)
         SessionHandler::start_session_id_extraction(msg_store.clone());
 
         // Process stdout logs (Codex's JSONL output)
-        let current_dir = current_dir.clone();
+        let current_dir = current_dir.to_path_buf();
         tokio::spawn(async move {
             let mut stream = msg_store.stdout_lines_stream();
             use std::collections::HashMap;
@@ -648,7 +652,7 @@ pub enum CodexFileChange {
 
 impl CodexJson {
     /// Convert to normalized entries
-    pub fn to_normalized_entries(&self, current_dir: &PathBuf) -> Option<Vec<NormalizedEntry>> {
+    pub fn to_normalized_entries(&self, current_dir: &Path) -> Option<Vec<NormalizedEntry>> {
         match self {
             CodexJson::SystemConfig { .. } => self.format_config_message().map(|content| {
                 vec![NormalizedEntry {
diff --git a/crates/executors/src/executors/cursor.rs b/crates/executors/src/executors/cursor.rs
index 296e465e..f5faf626 100644
--- a/crates/executors/src/executors/cursor.rs
+++ b/crates/executors/src/executors/cursor.rs
@@ -1,5 +1,5 @@
 use core::str;
-use std::{path::PathBuf, process::Stdio, sync::Arc, time::Duration};
+use std::{path::Path, process::Stdio, sync::Arc, time::Duration};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -61,7 +61,7 @@ impl Cursor {
 impl StandardCodingAgentExecutor for Cursor {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -91,7 +91,7 @@ impl StandardCodingAgentExecutor for Cursor {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -122,11 +122,11 @@ impl StandardCodingAgentExecutor for Cursor {
         Ok(child)
     }
 
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, worktree_path: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, worktree_path: &Path) {
         let entry_index_provider = EntryIndexProvider::start_from(&msg_store);
 
         // Process Cursor stdout JSONL with typed serde models
-        let current_dir = worktree_path.clone();
+        let current_dir = worktree_path.to_path_buf();
        tokio::spawn(async move {
            let mut lines = msg_store.stdout_lines_stream();
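A nice property of the `&PathBuf` to `&Path` migration across these executors: it is non-breaking at call sites, because `&PathBuf` coerces to `&Path` through `Deref`. Minimal sketch, not the real trait:

```rust
use std::path::{Path, PathBuf};

fn spawn_agent(current_dir: &Path) {
    println!("spawning in {}", current_dir.display());
}

fn main() {
    let worktree: PathBuf = PathBuf::from("/tmp/worktree");
    spawn_agent(&worktree); // &PathBuf auto-derefs to &Path: no caller edits
    spawn_agent(Path::new("/tmp/other")); // and borrowed paths work directly
}
```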
diff --git a/crates/executors/src/executors/gemini.rs b/crates/executors/src/executors/gemini.rs
index 552a33a5..e8bed9e9 100644
--- a/crates/executors/src/executors/gemini.rs
+++ b/crates/executors/src/executors/gemini.rs
@@ -1,4 +1,8 @@
-use std::{path::PathBuf, process::Stdio, sync::Arc};
+use std::{
+    path::{Path, PathBuf},
+    process::Stdio,
+    sync::Arc,
+};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -74,7 +78,7 @@ impl Gemini {
 impl StandardCodingAgentExecutor for Gemini {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -105,7 +109,7 @@ impl StandardCodingAgentExecutor for Gemini {
         let duplicate_stdout = stdout_dup::duplicate_stdout(&mut child)?;
         tokio::spawn(Self::record_session(
             duplicate_stdout,
-            current_dir.clone(),
+            current_dir.to_path_buf(),
             prompt.to_string(),
             false,
         ));
@@ -115,7 +119,7 @@ impl StandardCodingAgentExecutor for Gemini {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         _session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -149,7 +153,7 @@ impl StandardCodingAgentExecutor for Gemini {
         let duplicate_stdout = stdout_dup::duplicate_stdout(&mut child)?;
         tokio::spawn(Self::record_session(
             duplicate_stdout,
-            current_dir.clone(),
+            current_dir.to_path_buf(),
             prompt.to_string(),
             true,
         ));
@@ -175,7 +179,7 @@ impl StandardCodingAgentExecutor for Gemini {
     /// Sets up log normalization for the Gemini executor:
     /// - stderr via [`normalize_stderr_logs`]
     /// - stdout via [`PlainTextLogProcessor`] with Gemini-specific formatting and default heuristics
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, worktree_path: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, worktree_path: &Path) {
         let entry_index_counter = EntryIndexProvider::start_from(&msg_store);
         normalize_stderr_logs(msg_store.clone(), entry_index_counter.clone());
 
@@ -335,7 +339,7 @@ impl Gemini {
     /// Build comprehensive prompt with session context for follow-up execution
     async fn build_followup_prompt(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<String, ExecutorError> {
         let session_file_path = Self::get_session_file_path(current_dir).await;
@@ -381,7 +385,7 @@ You are continuing work on the above task. The execution history shows the previ
         utils::path::get_vibe_kanban_temp_dir().join("gemini_sessions")
     }
 
-    async fn get_session_file_path(current_dir: &PathBuf) -> PathBuf {
+    async fn get_session_file_path(current_dir: &Path) -> PathBuf {
         let file_name = current_dir.file_name().unwrap_or_default();
         let new_base = Self::get_sessions_base_dir();
         let new_path = new_base.join(file_name);
diff --git a/crates/executors/src/executors/mod.rs b/crates/executors/src/executors/mod.rs
index 7f05f260..3c612c39 100644
--- a/crates/executors/src/executors/mod.rs
+++ b/crates/executors/src/executors/mod.rs
@@ -1,4 +1,4 @@
-use std::{path::PathBuf, sync::Arc};
+use std::{path::Path, sync::Arc};
 
 use async_trait::async_trait;
 use command_group::AsyncGroupChild;
@@ -132,16 +132,16 @@ impl CodingAgent {
 pub trait StandardCodingAgentExecutor {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError>;
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError>;
-    fn normalize_logs(&self, _raw_logs_event_store: Arc<MsgStore>, _worktree_path: &PathBuf);
+    fn normalize_logs(&self, _raw_logs_event_store: Arc<MsgStore>, _worktree_path: &Path);
 
    // MCP configuration methods
    fn default_mcp_config_path(&self) -> Option<std::path::PathBuf>;
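Nothing is lost by the narrower borrow: `file_name`, `join`, and the other read-only operations used by `get_session_file_path` all live on `Path`. A self-contained sketch of that function's shape, with an illustrative hard-coded sessions root in place of the real `get_vibe_kanban_temp_dir()`-derived one:

```rust
use std::path::{Path, PathBuf};

fn session_file_path(current_dir: &Path) -> PathBuf {
    // Same shape as the diff: key the session file by the worktree's dir name.
    let file_name = current_dir.file_name().unwrap_or_default();
    PathBuf::from("/tmp/vibe-kanban/gemini_sessions").join(file_name)
}

fn main() {
    let p = session_file_path(Path::new("/work/trees/task-42"));
    assert_eq!(p, PathBuf::from("/tmp/vibe-kanban/gemini_sessions/task-42"));
}
```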
diff --git a/crates/executors/src/executors/opencode.rs b/crates/executors/src/executors/opencode.rs
index 0a1fa6f7..b8829c35 100644
--- a/crates/executors/src/executors/opencode.rs
+++ b/crates/executors/src/executors/opencode.rs
@@ -1,4 +1,9 @@
-use std::{fmt, path::PathBuf, process::Stdio, sync::Arc};
+use std::{
+    fmt,
+    path::{Path, PathBuf},
+    process::Stdio,
+    sync::Arc,
+};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -58,7 +63,7 @@ impl Opencode {
 impl StandardCodingAgentExecutor for Opencode {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -90,7 +95,7 @@ impl StandardCodingAgentExecutor for Opencode {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -130,9 +135,8 @@ impl StandardCodingAgentExecutor for Opencode {
     /// 2. Error log recognition thread: read by line, identify error log lines, store them as error messages.
     /// 3. Main normalizer thread: read stderr by line, filter out log lines, send lines (with '\n' appended) to plain text normalizer,
     ///    then define predicate for split and create appropriate normalized entry (either assistant or tool call).
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, worktree_path: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, worktree_path: &Path) {
         let entry_index_counter = EntryIndexProvider::start_from(&msg_store);
-        let worktree_path = worktree_path.clone();
 
         let stderr_lines = msg_store
             .stderr_lines_stream()
@@ -169,7 +173,7 @@ impl StandardCodingAgentExecutor for Opencode {
         // Normalize agent logs
         tokio::spawn(Self::process_agent_logs(
             agent_logs,
-            worktree_path,
+            worktree_path.to_path_buf(),
             entry_index_counter,
             msg_store,
         ));
@@ -247,7 +251,7 @@ impl Opencode {
     }
 
     /// Create normalized entry from content
-    pub fn create_normalized_entry(content: String, worktree_path: &PathBuf) -> NormalizedEntry {
+    pub fn create_normalized_entry(content: String, worktree_path: &Path) -> NormalizedEntry {
         // Check if this is a tool call
         if let Some(tool_call) = ToolCall::parse(&content) {
             let tool_name = tool_call.tool.name();
diff --git a/crates/executors/src/executors/qwen.rs b/crates/executors/src/executors/qwen.rs
index 423c1411..d12defaf 100644
--- a/crates/executors/src/executors/qwen.rs
+++ b/crates/executors/src/executors/qwen.rs
@@ -1,4 +1,4 @@
-use std::{path::PathBuf, process::Stdio, sync::Arc};
+use std::{path::Path, process::Stdio, sync::Arc};
 
 use async_trait::async_trait;
 use command_group::{AsyncCommandGroup, AsyncGroupChild};
@@ -40,7 +40,7 @@ impl QwenCode {
 impl StandardCodingAgentExecutor for QwenCode {
     async fn spawn(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
         let (shell_cmd, shell_arg) = get_shell_command();
@@ -71,7 +71,7 @@ impl StandardCodingAgentExecutor for QwenCode {
 
     async fn spawn_follow_up(
         &self,
-        current_dir: &PathBuf,
+        current_dir: &Path,
         prompt: &str,
         session_id: &str,
     ) -> Result<AsyncGroupChild, ExecutorError> {
@@ -103,7 +103,7 @@ impl StandardCodingAgentExecutor for QwenCode {
         Ok(child)
     }
 
-    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &PathBuf) {
+    fn normalize_logs(&self, msg_store: Arc<MsgStore>, current_dir: &Path) {
         // QwenCode has similar output format to Gemini CLI
         // Use Gemini's proven sentence-break formatting instead of simple replace
         let entry_index_counter = EntryIndexProvider::start_from(&msg_store);
diff --git a/crates/executors/src/logs/plain_text_processor.rs b/crates/executors/src/logs/plain_text_processor.rs
index 25969344..313cf642 100644
--- a/crates/executors/src/logs/plain_text_processor.rs
+++ b/crates/executors/src/logs/plain_text_processor.rs
@@ -330,11 +330,9 @@ impl PlainTextLogProcessor {
         normalized_entry_producer: impl Fn(String) -> NormalizedEntry + 'static + Send,
         size_threshold: Option<usize>,
         time_gap: Option<Duration>,
-        format_chunk: Option<Box<dyn Fn(Option<&NormalizedEntry>, String) -> String + 'static + Send>>,
-        transform_lines: Option<Box<dyn Fn(&mut Vec<String>) + 'static + Send>>,
-        message_boundary_predicate: Option<
-            Box<dyn Fn(&[String]) -> Option<usize> + 'static + Send>,
-        >,
+        format_chunk: Option<FormatChunkFn>,
+        transform_lines: Option<TransformLinesFn>,
+        message_boundary_predicate: Option<MessageBoundaryPredicateFn>,
         index_provider: EntryIndexProvider,
     ) -> Self {
         Self {
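The `plain_text_processor` hunk is the `clippy::type_complexity` fix: the boxed closure types move behind named aliases. The alias names above (`FormatChunkFn` and friends) are reconstructed for readability, since the diff context shows only the simplified `new` signature, not the alias definitions. A self-contained sketch of the technique:

```rust
// Name the closure type once, then reuse it in fields and signatures.
type LineTransform = Box<dyn Fn(&mut Vec<String>) + Send + 'static>;

struct Processor {
    transform_lines: Option<LineTransform>,
}

impl Processor {
    fn new(transform_lines: Option<LineTransform>) -> Self {
        Self { transform_lines }
    }

    fn run(&self, lines: &mut Vec<String>) {
        if let Some(f) = &self.transform_lines {
            f(lines);
        }
    }
}

fn main() {
    let p = Processor::new(Some(Box::new(|lines: &mut Vec<String>| {
        lines.retain(|l| !l.is_empty());
    })));
    let mut lines = vec![String::new(), "keep".to_string()];
    p.run(&mut lines);
    assert_eq!(lines, vec!["keep".to_string()]);
}
```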
diff --git a/crates/local-deployment/src/container.rs b/crates/local-deployment/src/container.rs
index 9d458be6..32f15184 100644
--- a/crates/local-deployment/src/container.rs
+++ b/crates/local-deployment/src/container.rs
@@ -470,6 +470,7 @@ impl LocalContainerService {
     }
 
     /// Get the worktree path for a task attempt
+    #[allow(dead_code)]
     async fn get_worktree_path(
         &self,
         task_attempt: &TaskAttempt,
@@ -536,7 +537,6 @@ impl LocalContainerService {
     /// Create a live diff stream for ongoing attempts
     async fn create_live_diff_stream(
         &self,
-        project_repo_path: &Path,
         worktree_path: &Path,
         task_branch: &str,
         base_branch: &str,
@@ -563,7 +563,6 @@ impl LocalContainerService {
         .boxed();
 
         // Create live update stream
-        let project_repo_path = project_repo_path.to_path_buf();
         let worktree_path = worktree_path.to_path_buf();
         let task_branch = task_branch.to_string();
         let base_branch = base_branch.to_string();
@@ -583,7 +582,6 @@ impl LocalContainerService {
                     if !changed_paths.is_empty() {
                         for event in Self::process_file_changes(
                             &git_service,
-                            &project_repo_path,
                             &worktree_path,
                             &task_branch,
                             &base_branch,
@@ -635,7 +633,6 @@ impl LocalContainerService {
     /// Process file changes and generate diff events
     fn process_file_changes(
         git_service: &GitService,
-        project_repo_path: &Path,
         worktree_path: &Path,
         task_branch: &str,
         base_branch: &str,
@@ -958,13 +955,8 @@ impl ContainerService for LocalContainerService {
         let worktree_path = PathBuf::from(container_ref);
 
         // Handle ongoing attempts (live streaming diff)
-        self.create_live_diff_stream(
-            &project_repo_path,
-            &worktree_path,
-            &task_branch,
-            &task_attempt.base_branch,
-        )
-        .await
+        self.create_live_diff_stream(&worktree_path, &task_branch, &task_attempt.base_branch)
+            .await
     }
 
     async fn try_commit_changes(&self, ctx: &ExecutionContext) -> Result<bool, ContainerError> {
@@ -1038,8 +1030,8 @@ impl ContainerService for LocalContainerService {
     /// Copy files from the original project directory to the worktree
     async fn copy_project_files(
         &self,
-        source_dir: &PathBuf,
-        target_dir: &PathBuf,
+        source_dir: &Path,
+        target_dir: &Path,
         copy_files: &str,
     ) -> Result<(), ContainerError> {
         let files: Vec<&str> = copy_files
diff --git a/crates/server/src/main.rs b/crates/server/src/main.rs
index c9b817c5..282077d4 100644
--- a/crates/server/src/main.rs
+++ b/crates/server/src/main.rs
@@ -68,10 +68,10 @@ async fn main() -> Result<(), VibeKanbanError> {
     let actual_port = listener.local_addr()?.port(); // get → 53427 (example)
 
     // Write port file for discovery if prod, warn on fail
-    if !cfg!(debug_assertions) {
-        if let Err(e) = write_port_file(actual_port).await {
-            tracing::warn!("Failed to write port file: {}", e);
-        }
+    if !cfg!(debug_assertions)
+        && let Err(e) = write_port_file(actual_port).await
+    {
+        tracing::warn!("Failed to write port file: {}", e);
     }
 
     tracing::info!("Server running on http://{host}:{actual_port}");
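The `main.rs` hunk (and the route changes below) use let-chains: `&&` may now join a boolean guard and an `if let` binding in a single condition, collapsing the nesting that clippy flags as `collapsible_if`. This is stable in the 2024 edition these crates declare (see the `services` `Cargo.toml` below). A sketch with a synchronous stand-in for the real `write_port_file` helper:

```rust
fn write_port_file() -> Result<(), String> {
    Err("disk full".to_string()) // stand-in for the real async helper
}

fn main() {
    // Requires edition 2024: a boolean condition chained with `let`.
    if !cfg!(debug_assertions)
        && let Err(e) = write_port_file()
    {
        eprintln!("Failed to write port file: {e}");
    }
}
```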
+ tracing::error!("Failed to initialize git repository: {}", e); + return Ok(ResponseJson(ApiResponse::error(&format!( + "Failed to initialize git repository: {}", + e + )))); } } diff --git a/crates/server/src/routes/task_attempts.rs b/crates/server/src/routes/task_attempts.rs index f5bef9b1..4e66e651 100644 --- a/crates/server/src/routes/task_attempts.rs +++ b/crates/server/src/routes/task_attempts.rs @@ -297,11 +297,11 @@ pub async fn merge_task_attempt( let mut commit_message = format!("{} (vibe-kanban {})", ctx.task.title, first_uuid_section); // Add description on next line if it exists - if let Some(description) = &ctx.task.description { - if !description.trim().is_empty() { - commit_message.push_str("\n\n"); - commit_message.push_str(description); - } + if let Some(description) = &ctx.task.description + && !description.trim().is_empty() + { + commit_message.push_str("\n\n"); + commit_message.push_str(description); } // Get branch name from task attempt @@ -716,15 +716,11 @@ pub async fn rebase_task_attempt( github_config.token(), )?; - if let Some(new_base_branch) = &effective_base_branch { - if new_base_branch != &ctx.task_attempt.base_branch { - TaskAttempt::update_base_branch( - &deployment.db().pool, - task_attempt.id, - new_base_branch, - ) + if let Some(new_base_branch) = &effective_base_branch + && new_base_branch != &ctx.task_attempt.base_branch + { + TaskAttempt::update_base_branch(&deployment.db().pool, task_attempt.id, new_base_branch) .await?; - } } Ok(ResponseJson(ApiResponse::success(()))) diff --git a/crates/services/Cargo.toml b/crates/services/Cargo.toml index bb00a703..b858daff 100644 --- a/crates/services/Cargo.toml +++ b/crates/services/Cargo.toml @@ -3,6 +3,10 @@ name = "services" version = "0.0.75" edition = "2024" +[features] +default = [] +cloud = [] + [dependencies] utils = { path = "../utils" } executors = { path = "../executors" } diff --git a/crates/services/src/services/container.rs b/crates/services/src/services/container.rs index 30933d81..369236c7 100644 --- a/crates/services/src/services/container.rs +++ b/crates/services/src/services/container.rs @@ -1,6 +1,6 @@ use std::{ collections::HashMap, - path::PathBuf, + path::{Path, PathBuf}, sync::{ Arc, atomic::{AtomicUsize, Ordering}, @@ -128,8 +128,8 @@ pub trait ContainerService { async fn copy_project_files( &self, - source_dir: &PathBuf, - target_dir: &PathBuf, + source_dir: &Path, + target_dir: &Path, copy_files: &str, ) -> Result<(), ContainerError>; diff --git a/crates/services/src/services/events.rs b/crates/services/src/services/events.rs index 46fda1c3..34c5aae2 100644 --- a/crates/services/src/services/events.rs +++ b/crates/services/src/services/events.rs @@ -81,6 +81,7 @@ pub mod task_patch { pub struct EventService { msg_store: Arc, db: DBService, + #[allow(dead_code)] entry_count: Arc>, } diff --git a/crates/services/src/services/file_ranker.rs b/crates/services/src/services/file_ranker.rs index f7c7bf86..5f808320 100644 --- a/crates/services/src/services/file_ranker.rs +++ b/crates/services/src/services/file_ranker.rs @@ -2,7 +2,6 @@ use std::{ collections::HashMap, path::{Path, PathBuf}, sync::Arc, - time::Instant, }; use chrono::{DateTime, Utc}; @@ -32,7 +31,6 @@ pub type FileStats = HashMap; struct RepoHistoryCache { head_sha: String, stats: Arc, - generated_at: Instant, } /// Global cache for file ranking statistics @@ -147,7 +145,6 @@ impl FileRanker { RepoHistoryCache { head_sha: head_info.oid, stats: Arc::clone(&stats_arc), - generated_at: Instant::now(), }, ); } diff --git 
diff --git a/crates/services/src/services/filesystem_watcher.rs b/crates/services/src/services/filesystem_watcher.rs
index 2fa8985a..8da0aba9 100644
--- a/crates/services/src/services/filesystem_watcher.rs
+++ b/crates/services/src/services/filesystem_watcher.rs
@@ -5,7 +5,7 @@ use std::{
 };
 
 use futures::{
-    SinkExt, StreamExt,
+    SinkExt,
     channel::mpsc::{Receiver, channel},
 };
 use ignore::{
@@ -18,6 +18,12 @@ use notify_debouncer_full::{
 };
 use thiserror::Error;
 
+pub type WatcherComponents = (
+    Debouncer<RecommendedWatcher, RecommendedCache>,
+    Receiver<DebounceEventResult>,
+    PathBuf,
+);
+
 #[derive(Debug, Error)]
 pub enum FilesystemWatcherError {
     #[error(transparent)]
@@ -100,16 +106,7 @@ fn debounced_should_forward(event: &DebouncedEvent, gi: &Gitignore, canonical_ro
     .all(|path| path_allowed(path, gi, canonical_root))
 }
 
-pub fn async_watcher(
-    root: PathBuf,
-) -> Result<
-    (
-        Debouncer<RecommendedWatcher, RecommendedCache>,
-        Receiver<DebounceEventResult>,
-        PathBuf,
-    ),
-    FilesystemWatcherError,
-> {
+pub fn async_watcher(root: PathBuf) -> Result<WatcherComponents, FilesystemWatcherError> {
     let canonical_root = canonicalize_lossy(&root);
     let gi_set = Arc::new(build_gitignore_set(&canonical_root)?);
     let (mut tx, rx) = channel(64); // Increased capacity for error bursts
@@ -151,18 +148,3 @@ pub fn async_watcher(
 
     Ok((debouncer, rx, canonical_root))
 }
-
-async fn async_watch<P: AsRef<Path>>(path: P) -> Result<(), FilesystemWatcherError> {
-    let (_debouncer, mut rx, _canonical_path) = async_watcher(path.as_ref().to_path_buf())?;
-
-    // The debouncer is already watching the path, no need to call watch() again
-
-    while let Some(res) = rx.next().await {
-        match res {
-            Ok(event) => println!("changed: {event:?}"),
-            Err(e) => println!("watch error: {e:?}"),
-        }
-    }
-
-    Ok(())
-}
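Same `clippy::type_complexity` treatment as in `plain_text_processor`, this time on a return type: the multi-line tuple in `async_watcher`'s signature collapses into `WatcherComponents`. (The `Debouncer` and `Receiver` type parameters above are reconstructed from the surrounding code.) A generic sketch of aliasing a tuple return:

```rust
use std::path::PathBuf;

// One alias instead of a multi-line tuple in every signature.
type WatchSetup = (Vec<String>, PathBuf);

fn setup_watch(root: PathBuf) -> Result<WatchSetup, std::io::Error> {
    let ignored = vec![String::from(".git")];
    Ok((ignored, root))
}

fn main() -> Result<(), std::io::Error> {
    let (ignored, canonical_root) = setup_watch(PathBuf::from("/tmp/repo"))?;
    println!("watching {} (ignoring {ignored:?})", canonical_root.display());
    Ok(())
}
```

Note that removing the dead `async_watch` example below also allows dropping `StreamExt` from the imports, since `rx.next()` was its only user.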
diff --git a/crates/services/src/services/git.rs b/crates/services/src/services/git.rs
index db6baae1..79745bfd 100644
--- a/crates/services/src/services/git.rs
+++ b/crates/services/src/services/git.rs
@@ -2,8 +2,8 @@ use std::{collections::HashMap, path::Path};
 
 use chrono::{DateTime, Utc};
 use git2::{
-    BranchType, CherrypickOptions, Delta, DiffFindOptions, DiffOptions, Error as GitError,
-    FetchOptions, Reference, Remote, Repository, Sort, build::CheckoutBuilder,
+    BranchType, Delta, DiffFindOptions, DiffOptions, Error as GitError, FetchOptions, Reference,
+    Remote, Repository, Sort, build::CheckoutBuilder,
 };
 use regex;
 use serde::Serialize;
@@ -1383,97 +1383,6 @@ impl GitService {
         Ok(())
     }
 
-    /// Find the merge-base between two commits
-    fn get_merge_base(
-        repo: &Repository,
-        commit1: git2::Oid,
-        commit2: git2::Oid,
-    ) -> Result<git2::Oid, GitServiceError> {
-        repo.merge_base(commit1, commit2)
-            .map_err(GitServiceError::Git)
-    }
-
-    /// Find commits that are unique to the task branch (not in either base branch)
-    fn find_unique_commits(
-        repo: &Repository,
-        task_branch_commit: git2::Oid,
-        old_base_commit: git2::Oid,
-        new_base_commit: git2::Oid,
-    ) -> Result<Vec<git2::Oid>, GitServiceError> {
-        // Find merge-base between task branch and old base branch
-        let task_old_base_merge_base =
-            Self::get_merge_base(repo, task_branch_commit, old_base_commit)?;
-
-        // Find merge-base between old base and new base
-        let old_new_base_merge_base = Self::get_merge_base(repo, old_base_commit, new_base_commit)?;
-
-        // Get all commits from task branch back to the merge-base with old base
-        let mut walker = repo.revwalk()?;
-        walker.push(task_branch_commit)?;
-        walker.hide(task_old_base_merge_base)?;
-
-        let mut task_commits = Vec::new();
-        for commit_id in walker {
-            let commit_id = commit_id?;
-
-            // Check if this commit is not in the old base branch lineage
-            // (i.e., it's not between old_new_base_merge_base and old_base_commit)
-            let is_in_old_base = repo
-                .graph_descendant_of(commit_id, old_new_base_merge_base)
-                .unwrap_or(false)
-                && repo
-                    .graph_descendant_of(old_base_commit, commit_id)
-                    .unwrap_or(false);
-
-            if !is_in_old_base {
-                task_commits.push(commit_id);
-            }
-        }
-
-        // Reverse to get chronological order for cherry-picking
-        task_commits.reverse();
-        Ok(task_commits)
-    }
-
-    /// Cherry-pick specific commits onto a new base
-    fn cherry_pick_commits(
-        repo: &Repository,
-        commits: &[git2::Oid],
-        signature: &git2::Signature,
-    ) -> Result<(), GitServiceError> {
-        for &commit_id in commits {
-            let commit = repo.find_commit(commit_id)?;
-
-            // Cherry-pick the commit
-            let mut cherrypick_opts = CherrypickOptions::new();
-            repo.cherrypick(&commit, Some(&mut cherrypick_opts))?;
-
-            // Check for conflicts
-            let mut index = repo.index()?;
-            if index.has_conflicts() {
-                return Err(GitServiceError::MergeConflicts(format!(
-                    "Cherry-pick failed due to conflicts on commit {commit_id}, please resolve conflicts manually"
-                )));
-            }
-
-            // Commit the cherry-pick
-            let tree_id = index.write_tree()?;
-            let tree = repo.find_tree(tree_id)?;
-            let head_commit = repo.head()?.peel_to_commit()?;
-
-            repo.commit(
-                Some("HEAD"),
-                signature,
-                signature,
-                commit.message().unwrap_or("Cherry-picked commit"),
-                &tree,
-                &[&head_commit],
-            )?;
-        }
-
-        Ok(())
-    }
-
     /// Clone a repository to the specified directory
     #[cfg(feature = "cloud")]
     pub fn clone_repository(
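These removed cherry-pick helpers and the deleted `async_watch` share a cause: under `-D warnings`, rustc's `dead_code` lint becomes a build error, so unused private items must either be deleted (as here) or kept explicitly, as with the `#[allow(dead_code)]` on `get_worktree_path` and `entry_count` earlier in the diff. In miniature:

```rust
// Kept deliberately despite being unused: the allow silences dead_code.
#[allow(dead_code)]
fn kept_for_later() {}

// A private, uncalled function like this only warns in a normal build,
// but fails the build once warnings are denied (e.g. `-D warnings` in CI).
fn unused_helper() {}

fn main() {}
```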