diff --git a/crates/db/.sqlx/query-acdb8488d9d698e8522a1a1a062f560857e70cf8c1dee1eaecd75b096911cb17.json b/crates/db/.sqlx/query-2494dbc96dfeed84122a142ca4b4ce5166875560295794e09ffff754861fd765.json similarity index 73% rename from crates/db/.sqlx/query-acdb8488d9d698e8522a1a1a062f560857e70cf8c1dee1eaecd75b096911cb17.json rename to crates/db/.sqlx/query-2494dbc96dfeed84122a142ca4b4ce5166875560295794e09ffff754861fd765.json index a195c918..9778d1d8 100644 --- a/crates/db/.sqlx/query-acdb8488d9d698e8522a1a1a062f560857e70cf8c1dee1eaecd75b096911cb17.json +++ b/crates/db/.sqlx/query-2494dbc96dfeed84122a142ca4b4ce5166875560295794e09ffff754861fd765.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n merge_commit,\n base_branch,\n base_coding_agent AS \"base_coding_agent!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n WHERE id = $1", + "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n merge_commit,\n base_branch,\n profile AS \"profile!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n WHERE id = $1", "describe": { "columns": [ { @@ -34,7 +34,7 @@ "type_info": "Text" }, { - "name": "base_coding_agent!", + "name": "profile!", "ordinal": 6, "type_info": "Text" }, @@ -100,5 +100,5 @@ false ] }, - "hash": "acdb8488d9d698e8522a1a1a062f560857e70cf8c1dee1eaecd75b096911cb17" + "hash": 
"2494dbc96dfeed84122a142ca4b4ce5166875560295794e09ffff754861fd765" } diff --git a/crates/db/.sqlx/query-1f1850b240af8edf2a05ad4a250c78331f69f3637f4b8a554898b9e6ba5bba37.json b/crates/db/.sqlx/query-5f44ebd79693cfe8f0eab52c1a41533bb78d340771a3ac178f7745852785c843.json similarity index 73% rename from crates/db/.sqlx/query-1f1850b240af8edf2a05ad4a250c78331f69f3637f4b8a554898b9e6ba5bba37.json rename to crates/db/.sqlx/query-5f44ebd79693cfe8f0eab52c1a41533bb78d340771a3ac178f7745852785c843.json index da50ab4f..479f4c59 100644 --- a/crates/db/.sqlx/query-1f1850b240af8edf2a05ad4a250c78331f69f3637f4b8a554898b9e6ba5bba37.json +++ b/crates/db/.sqlx/query-5f44ebd79693cfe8f0eab52c1a41533bb78d340771a3ac178f7745852785c843.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n merge_commit,\n base_branch,\n base_coding_agent AS \"base_coding_agent!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n WHERE rowid = $1", + "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n merge_commit,\n base_branch,\n profile AS \"profile!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n WHERE rowid = $1", "describe": { "columns": [ { @@ -34,7 +34,7 @@ "type_info": "Text" }, { - "name": "base_coding_agent!", + "name": "profile!", "ordinal": 6, "type_info": "Text" }, @@ -100,5 +100,5 @@ false ] }, - "hash": 
"1f1850b240af8edf2a05ad4a250c78331f69f3637f4b8a554898b9e6ba5bba37" + "hash": "5f44ebd79693cfe8f0eab52c1a41533bb78d340771a3ac178f7745852785c843" } diff --git a/crates/db/.sqlx/query-f58b737bf1deb0e8d57fca5b99423e8ba438949679816316ef446e0b7b8eb3e6.json b/crates/db/.sqlx/query-63ec4a57694b679a59ae7b633ca6dd5b5fb25bd64467a6c68f18729c33a4455b.json similarity index 90% rename from crates/db/.sqlx/query-f58b737bf1deb0e8d57fca5b99423e8ba438949679816316ef446e0b7b8eb3e6.json rename to crates/db/.sqlx/query-63ec4a57694b679a59ae7b633ca6dd5b5fb25bd64467a6c68f18729c33a4455b.json index 9e0d0b39..dab993c9 100644 --- a/crates/db/.sqlx/query-f58b737bf1deb0e8d57fca5b99423e8ba438949679816316ef446e0b7b8eb3e6.json +++ b/crates/db/.sqlx/query-63ec4a57694b679a59ae7b633ca6dd5b5fb25bd64467a6c68f18729c33a4455b.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT \n id as \"id!: Uuid\", \n task_attempt_id as \"task_attempt_id!: Uuid\", \n run_reason as \"run_reason!: ExecutionProcessRunReason\",\n executor_action as \"executor_action!: sqlx::types::Json\",\n status as \"status!: ExecutionProcessStatus\",\n exit_code,\n started_at as \"started_at!: DateTime\",\n completed_at as \"completed_at?: DateTime\",\n created_at as \"created_at!: DateTime\", \n updated_at as \"updated_at!: DateTime\"\n FROM execution_processes \n WHERE task_attempt_id = $1 \n AND executor_action_type = $2\n ORDER BY created_at DESC \n LIMIT 1", + "query": "SELECT \n id as \"id!: Uuid\", \n task_attempt_id as \"task_attempt_id!: Uuid\", \n run_reason as \"run_reason!: ExecutionProcessRunReason\",\n executor_action as \"executor_action!: sqlx::types::Json\",\n status as \"status!: ExecutionProcessStatus\",\n exit_code,\n started_at as \"started_at!: DateTime\",\n completed_at as \"completed_at?: DateTime\",\n created_at as \"created_at!: DateTime\", \n updated_at as \"updated_at!: DateTime\"\n FROM execution_processes \n WHERE task_attempt_id = $1 \n AND run_reason = $2\n ORDER BY created_at DESC \n LIMIT 1", 
"describe": { "columns": [ { @@ -70,5 +70,5 @@ false ] }, - "hash": "f58b737bf1deb0e8d57fca5b99423e8ba438949679816316ef446e0b7b8eb3e6" + "hash": "63ec4a57694b679a59ae7b633ca6dd5b5fb25bd64467a6c68f18729c33a4455b" } diff --git a/crates/db/.sqlx/query-a500d5054ba09e64a4f98500a5c600ba66b9c919af26ae6ca79b1cc82d138158.json b/crates/db/.sqlx/query-703270b5172b81852470a886a72d9f749cbef0078e786582d23eb18e8cf11119.json similarity index 68% rename from crates/db/.sqlx/query-a500d5054ba09e64a4f98500a5c600ba66b9c919af26ae6ca79b1cc82d138158.json rename to crates/db/.sqlx/query-703270b5172b81852470a886a72d9f749cbef0078e786582d23eb18e8cf11119.json index 5d963554..cc0a043e 100644 --- a/crates/db/.sqlx/query-a500d5054ba09e64a4f98500a5c600ba66b9c919af26ae6ca79b1cc82d138158.json +++ b/crates/db/.sqlx/query-703270b5172b81852470a886a72d9f749cbef0078e786582d23eb18e8cf11119.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT ta.id AS \"id!: Uuid\",\n ta.task_id AS \"task_id!: Uuid\",\n ta.container_ref,\n ta.branch,\n ta.base_branch,\n ta.merge_commit,\n ta.base_coding_agent AS \"base_coding_agent!\",\n ta.pr_url,\n ta.pr_number,\n ta.pr_status,\n ta.pr_merged_at AS \"pr_merged_at: DateTime\",\n ta.worktree_deleted AS \"worktree_deleted!: bool\",\n ta.setup_completed_at AS \"setup_completed_at: DateTime\",\n ta.created_at AS \"created_at!: DateTime\",\n ta.updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts ta\n JOIN tasks t ON ta.task_id = t.id\n JOIN projects p ON t.project_id = p.id\n WHERE ta.id = $1 AND t.id = $2 AND p.id = $3", + "query": "SELECT ta.id AS \"id!: Uuid\",\n ta.task_id AS \"task_id!: Uuid\",\n ta.container_ref,\n ta.branch,\n ta.base_branch,\n ta.merge_commit,\n ta.profile AS \"profile!\",\n ta.pr_url,\n ta.pr_number,\n ta.pr_status,\n ta.pr_merged_at AS \"pr_merged_at: DateTime\",\n ta.worktree_deleted AS \"worktree_deleted!: bool\",\n ta.setup_completed_at AS \"setup_completed_at: DateTime\",\n ta.created_at AS \"created_at!: DateTime\",\n 
ta.updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts ta\n JOIN tasks t ON ta.task_id = t.id\n JOIN projects p ON t.project_id = p.id\n WHERE ta.id = $1 AND t.id = $2 AND p.id = $3", "describe": { "columns": [ { @@ -34,7 +34,7 @@ "type_info": "Text" }, { - "name": "base_coding_agent!", + "name": "profile!", "ordinal": 6, "type_info": "Text" }, @@ -100,5 +100,5 @@ false ] }, - "hash": "a500d5054ba09e64a4f98500a5c600ba66b9c919af26ae6ca79b1cc82d138158" + "hash": "703270b5172b81852470a886a72d9f749cbef0078e786582d23eb18e8cf11119" } diff --git a/crates/db/.sqlx/query-8c691c79539b34f91f09e6dce51eb684840804f9279f9990cfdcb9015453d9d8.json b/crates/db/.sqlx/query-70474b20d1a3affa13c80926f954ca2007faa6977508bb6372a867bdc56c4830.json similarity index 70% rename from crates/db/.sqlx/query-8c691c79539b34f91f09e6dce51eb684840804f9279f9990cfdcb9015453d9d8.json rename to crates/db/.sqlx/query-70474b20d1a3affa13c80926f954ca2007faa6977508bb6372a867bdc56c4830.json index 1c504d19..360aaad5 100644 --- a/crates/db/.sqlx/query-8c691c79539b34f91f09e6dce51eb684840804f9279f9990cfdcb9015453d9d8.json +++ b/crates/db/.sqlx/query-70474b20d1a3affa13c80926f954ca2007faa6977508bb6372a867bdc56c4830.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n base_branch,\n merge_commit,\n base_coding_agent AS \"base_coding_agent!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n WHERE task_id = $1\n ORDER BY created_at DESC", + "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n base_branch,\n merge_commit,\n profile AS \"profile!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: 
DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n WHERE task_id = $1\n ORDER BY created_at DESC", "describe": { "columns": [ { @@ -34,7 +34,7 @@ "type_info": "Text" }, { - "name": "base_coding_agent!", + "name": "profile!", "ordinal": 6, "type_info": "Text" }, @@ -100,5 +100,5 @@ false ] }, - "hash": "8c691c79539b34f91f09e6dce51eb684840804f9279f9990cfdcb9015453d9d8" + "hash": "70474b20d1a3affa13c80926f954ca2007faa6977508bb6372a867bdc56c4830" } diff --git a/crates/db/.sqlx/query-1174eecd9f26565a4f4e1e367b5d7c90b4d19b793e496c2e01593f32c5101f24.json b/crates/db/.sqlx/query-741e831f4509958c508da36e955e381905672b39fc121170418015e2512184a2.json similarity index 70% rename from crates/db/.sqlx/query-1174eecd9f26565a4f4e1e367b5d7c90b4d19b793e496c2e01593f32c5101f24.json rename to crates/db/.sqlx/query-741e831f4509958c508da36e955e381905672b39fc121170418015e2512184a2.json index 64f8c018..5065b645 100644 --- a/crates/db/.sqlx/query-1174eecd9f26565a4f4e1e367b5d7c90b4d19b793e496c2e01593f32c5101f24.json +++ b/crates/db/.sqlx/query-741e831f4509958c508da36e955e381905672b39fc121170418015e2512184a2.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "INSERT INTO task_attempts (id, task_id, container_ref, branch, base_branch, merge_commit, base_coding_agent, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted, setup_completed_at)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)\n RETURNING id as \"id!: Uuid\", task_id as \"task_id!: Uuid\", container_ref, branch, base_branch, merge_commit, base_coding_agent as \"base_coding_agent!\", pr_url, pr_number, pr_status, pr_merged_at as \"pr_merged_at: DateTime\", worktree_deleted as \"worktree_deleted!: bool\", setup_completed_at as \"setup_completed_at: DateTime\", created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: 
DateTime\"", + "query": "INSERT INTO task_attempts (id, task_id, container_ref, branch, base_branch, merge_commit, profile, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted, setup_completed_at)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)\n RETURNING id as \"id!: Uuid\", task_id as \"task_id!: Uuid\", container_ref, branch, base_branch, merge_commit, profile as \"profile!\", pr_url, pr_number, pr_status, pr_merged_at as \"pr_merged_at: DateTime\", worktree_deleted as \"worktree_deleted!: bool\", setup_completed_at as \"setup_completed_at: DateTime\", created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"", "describe": { "columns": [ { @@ -34,7 +34,7 @@ "type_info": "Text" }, { - "name": "base_coding_agent!", + "name": "profile!", "ordinal": 6, "type_info": "Text" }, @@ -100,5 +100,5 @@ false ] }, - "hash": "1174eecd9f26565a4f4e1e367b5d7c90b4d19b793e496c2e01593f32c5101f24" + "hash": "741e831f4509958c508da36e955e381905672b39fc121170418015e2512184a2" } diff --git a/crates/db/.sqlx/query-8f5d9d112659d04406c20c885f72c075b355e54836930226fc84390c5a4516f7.json b/crates/db/.sqlx/query-8a1b8a47f4405a3e4a5bc41db8ec40af31f748587b61b7821f7e326ce9e23a75.json similarity index 71% rename from crates/db/.sqlx/query-8f5d9d112659d04406c20c885f72c075b355e54836930226fc84390c5a4516f7.json rename to crates/db/.sqlx/query-8a1b8a47f4405a3e4a5bc41db8ec40af31f748587b61b7821f7e326ce9e23a75.json index 2ddbbbed..26ed2833 100644 --- a/crates/db/.sqlx/query-8f5d9d112659d04406c20c885f72c075b355e54836930226fc84390c5a4516f7.json +++ b/crates/db/.sqlx/query-8a1b8a47f4405a3e4a5bc41db8ec40af31f748587b61b7821f7e326ce9e23a75.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n base_branch,\n merge_commit,\n base_coding_agent AS \"base_coding_agent!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS 
\"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n ORDER BY created_at DESC", + "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n container_ref,\n branch,\n base_branch,\n merge_commit,\n profile AS \"profile!\",\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime\",\n created_at AS \"created_at!: DateTime\",\n updated_at AS \"updated_at!: DateTime\"\n FROM task_attempts\n ORDER BY created_at DESC", "describe": { "columns": [ { @@ -34,7 +34,7 @@ "type_info": "Text" }, { - "name": "base_coding_agent!", + "name": "profile!", "ordinal": 6, "type_info": "Text" }, @@ -100,5 +100,5 @@ false ] }, - "hash": "8f5d9d112659d04406c20c885f72c075b355e54836930226fc84390c5a4516f7" + "hash": "8a1b8a47f4405a3e4a5bc41db8ec40af31f748587b61b7821f7e326ce9e23a75" } diff --git a/crates/db/.sqlx/query-cb2d1da9c3e3ad9f09ea30165f5fe584fef35a015038e83a548edb59ecadaa18.json b/crates/db/.sqlx/query-f338f880ec72989bcaabe3ae3e843fe1faabc1f990f2c91ceb30b76b0fe43153.json similarity index 86% rename from crates/db/.sqlx/query-cb2d1da9c3e3ad9f09ea30165f5fe584fef35a015038e83a548edb59ecadaa18.json rename to crates/db/.sqlx/query-f338f880ec72989bcaabe3ae3e843fe1faabc1f990f2c91ceb30b76b0fe43153.json index c7164bcf..f1086a87 100644 --- a/crates/db/.sqlx/query-cb2d1da9c3e3ad9f09ea30165f5fe584fef35a015038e83a548edb59ecadaa18.json +++ b/crates/db/.sqlx/query-f338f880ec72989bcaabe3ae3e843fe1faabc1f990f2c91ceb30b76b0fe43153.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT\n t.id AS \"id!: Uuid\",\n t.project_id AS \"project_id!: Uuid\",\n t.title,\n t.description,\n t.status AS \"status!: TaskStatus\",\n t.parent_task_attempt AS \"parent_task_attempt: Uuid\",\n t.created_at AS \"created_at!: 
DateTime\",\n t.updated_at AS \"updated_at!: DateTime\",\n\n CASE WHEN EXISTS (\n SELECT 1\n FROM task_attempts ta\n JOIN execution_processes ep\n ON ep.task_attempt_id = ta.id\n WHERE ta.task_id = t.id\n AND ep.status = 'running'\n AND ep.run_reason IN ('setupscript','cleanupscript','codingagent')\n LIMIT 1\n ) THEN 1 ELSE 0 END AS \"has_in_progress_attempt!: i64\",\n\n CASE WHEN EXISTS (\n SELECT 1\n FROM task_attempts ta\n WHERE ta.task_id = t.id\n AND ta.merge_commit IS NOT NULL\n LIMIT 1\n ) THEN 1 ELSE 0 END AS \"has_merged_attempt!: i64\",\n\n CASE WHEN (\n SELECT ep.status\n FROM task_attempts ta\n JOIN execution_processes ep\n ON ep.task_attempt_id = ta.id\n WHERE ta.task_id = t.id\n AND ep.run_reason IN ('setupscript','cleanupscript','codingagent')\n ORDER BY ep.created_at DESC\n LIMIT 1\n ) IN ('failed','killed') THEN 1 ELSE 0 END\n AS \"last_attempt_failed!: i64\",\n\n ( SELECT ta.base_coding_agent\n FROM task_attempts ta\n WHERE ta.task_id = t.id\n ORDER BY ta.created_at DESC\n LIMIT 1\n ) AS \"base_coding_agent!: String\"\n\nFROM tasks t\nWHERE t.project_id = $1\nORDER BY t.created_at DESC", + "query": "SELECT\n t.id AS \"id!: Uuid\",\n t.project_id AS \"project_id!: Uuid\",\n t.title,\n t.description,\n t.status AS \"status!: TaskStatus\",\n t.parent_task_attempt AS \"parent_task_attempt: Uuid\",\n t.created_at AS \"created_at!: DateTime\",\n t.updated_at AS \"updated_at!: DateTime\",\n\n CASE WHEN EXISTS (\n SELECT 1\n FROM task_attempts ta\n JOIN execution_processes ep\n ON ep.task_attempt_id = ta.id\n WHERE ta.task_id = t.id\n AND ep.status = 'running'\n AND ep.run_reason IN ('setupscript','cleanupscript','codingagent')\n LIMIT 1\n ) THEN 1 ELSE 0 END AS \"has_in_progress_attempt!: i64\",\n\n CASE WHEN EXISTS (\n SELECT 1\n FROM task_attempts ta\n WHERE ta.task_id = t.id\n AND ta.merge_commit IS NOT NULL\n LIMIT 1\n ) THEN 1 ELSE 0 END AS \"has_merged_attempt!: i64\",\n\n CASE WHEN (\n SELECT ep.status\n FROM task_attempts ta\n JOIN 
execution_processes ep\n ON ep.task_attempt_id = ta.id\n WHERE ta.task_id = t.id\n AND ep.run_reason IN ('setupscript','cleanupscript','codingagent')\n ORDER BY ep.created_at DESC\n LIMIT 1\n ) IN ('failed','killed') THEN 1 ELSE 0 END\n AS \"last_attempt_failed!: i64\",\n\n ( SELECT ta.profile\n FROM task_attempts ta\n WHERE ta.task_id = t.id\n ORDER BY ta.created_at DESC\n LIMIT 1\n ) AS \"profile!: String\"\n\nFROM tasks t\nWHERE t.project_id = $1\nORDER BY t.created_at DESC", "describe": { "columns": [ { @@ -59,7 +59,7 @@ "type_info": "Integer" }, { - "name": "base_coding_agent!: String", + "name": "profile!: String", "ordinal": 11, "type_info": "Text" } @@ -82,5 +82,5 @@ true ] }, - "hash": "cb2d1da9c3e3ad9f09ea30165f5fe584fef35a015038e83a548edb59ecadaa18" + "hash": "f338f880ec72989bcaabe3ae3e843fe1faabc1f990f2c91ceb30b76b0fe43153" } diff --git a/crates/db/migrations/20250813000001_rename_base_coding_agent_to_profile.sql b/crates/db/migrations/20250813000001_rename_base_coding_agent_to_profile.sql new file mode 100644 index 00000000..1da564e3 --- /dev/null +++ b/crates/db/migrations/20250813000001_rename_base_coding_agent_to_profile.sql @@ -0,0 +1,15 @@ +PRAGMA foreign_keys = ON; + +-- Rename base_coding_agent column to profile_label for better semantic clarity +ALTER TABLE task_attempts RENAME COLUMN base_coding_agent TO profile; +-- best effort attempt to not break older task attempts by mapping to profiles +UPDATE task_attempts +SET profile = CASE profile + WHEN 'CLAUDE_CODE' THEN 'claude-code' + WHEN 'CODEX' THEN 'codex' + WHEN 'GEMINI' THEN 'gemini' + WHEN 'AMP' THEN 'amp' + WHEN 'OPENCODE' THEN 'opencode' +END +WHERE profile IS NOT NULL + AND profile IN ('CLAUDE_CODE', 'CODEX', 'GEMINI', 'AMP', 'OPENCODE'); diff --git a/crates/db/src/models/execution_process.rs b/crates/db/src/models/execution_process.rs index df6d1142..00478bd8 100644 --- a/crates/db/src/models/execution_process.rs +++ b/crates/db/src/models/execution_process.rs @@ -1,5 +1,5 @@ use 
chrono::{DateTime, Utc}; -use executors::actions::{ExecutorAction, ExecutorActionKind}; +use executors::actions::ExecutorAction; use serde::{Deserialize, Serialize}; use serde_json::Value; use sqlx::{FromRow, SqlitePool, Type}; @@ -33,7 +33,7 @@ pub struct ExecutionProcess { pub id: Uuid, pub task_attempt_id: Uuid, pub run_reason: ExecutionProcessRunReason, - #[ts(skip)] + #[ts(type = "ExecutorAction")] pub executor_action: sqlx::types::Json, pub status: ExecutionProcessStatus, pub exit_code: Option, @@ -58,21 +58,6 @@ pub struct UpdateExecutionProcess { pub completed_at: Option>, } -#[derive(Debug, Clone, FromRow, Serialize, Deserialize, TS)] -pub struct ExecutionProcessSummary { - pub id: Uuid, - pub task_attempt_id: Uuid, - pub run_reason: ExecutionProcessRunReason, - #[ts(skip)] - pub executor_action: sqlx::types::Json, - pub status: ExecutionProcessStatus, - pub exit_code: Option, - pub started_at: DateTime, - pub completed_at: Option>, - pub created_at: DateTime, - pub updated_at: DateTime, -} - #[derive(Debug)] pub struct ExecutionContext { pub execution_process: ExecutionProcess, @@ -161,33 +146,6 @@ impl ExecutionProcess { .await } - /// Find execution process summaries for a task attempt (excluding stdio) - pub async fn find_summaries_by_task_attempt_id( - pool: &SqlitePool, - task_attempt_id: Uuid, - ) -> Result, sqlx::Error> { - sqlx::query_as!( - ExecutionProcessSummary, - r#"SELECT - id as "id!: Uuid", - task_attempt_id as "task_attempt_id!: Uuid", - run_reason as "run_reason!: ExecutionProcessRunReason", - executor_action as "executor_action!: sqlx::types::Json", - status as "status!: ExecutionProcessStatus", - exit_code, - started_at as "started_at!: DateTime", - completed_at as "completed_at?: DateTime", - created_at as "created_at!: DateTime", - updated_at as "updated_at!: DateTime" - FROM execution_processes - WHERE task_attempt_id = $1 - ORDER BY created_at ASC"#, - task_attempt_id - ) - .fetch_all(pool) - .await - } - /// Find running execution 
processes pub async fn find_running(pool: &SqlitePool) -> Result, sqlx::Error> { sqlx::query_as!( @@ -243,12 +201,11 @@ impl ExecutionProcess { } /// Find latest execution process by task attempt and executor action type - pub async fn find_latest_by_task_attempt_and_action_type( + pub async fn find_latest_by_task_attempt_and_run_reason( pool: &SqlitePool, task_attempt_id: Uuid, - executor_action: &ExecutorActionKind, + run_reason: &ExecutionProcessRunReason, ) -> Result, sqlx::Error> { - let executor_action_kind = executor_action.to_string(); sqlx::query_as!( ExecutionProcess, r#"SELECT @@ -264,11 +221,11 @@ impl ExecutionProcess { updated_at as "updated_at!: DateTime" FROM execution_processes WHERE task_attempt_id = $1 - AND executor_action_type = $2 + AND run_reason = $2 ORDER BY created_at DESC LIMIT 1"#, task_attempt_id, - executor_action_kind + run_reason ) .fetch_optional(pool) .await diff --git a/crates/db/src/models/task.rs b/crates/db/src/models/task.rs index 8f3bf169..c908c0ba 100644 --- a/crates/db/src/models/task.rs +++ b/crates/db/src/models/task.rs @@ -42,7 +42,7 @@ pub struct TaskWithAttemptStatus { pub has_in_progress_attempt: bool, pub has_merged_attempt: bool, pub last_attempt_failed: bool, - pub base_coding_agent: String, + pub profile: String, } #[derive(Debug, Deserialize, TS)] @@ -120,12 +120,12 @@ impl Task { ) IN ('failed','killed') THEN 1 ELSE 0 END AS "last_attempt_failed!: i64", - ( SELECT ta.base_coding_agent + ( SELECT ta.profile FROM task_attempts ta WHERE ta.task_id = t.id ORDER BY ta.created_at DESC LIMIT 1 - ) AS "base_coding_agent!: String" + ) AS "profile!: String" FROM tasks t WHERE t.project_id = $1 @@ -149,7 +149,7 @@ ORDER BY t.created_at DESC"#, has_in_progress_attempt: rec.has_in_progress_attempt != 0, has_merged_attempt: rec.has_merged_attempt != 0, last_attempt_failed: rec.last_attempt_failed != 0, - base_coding_agent: rec.base_coding_agent, + profile: rec.profile, }) .collect(); diff --git 
a/crates/db/src/models/task_attempt.rs b/crates/db/src/models/task_attempt.rs index 3334a44a..a410b17b 100644 --- a/crates/db/src/models/task_attempt.rs +++ b/crates/db/src/models/task_attempt.rs @@ -75,7 +75,7 @@ pub struct TaskAttempt { pub branch: Option, // Git branch name for this task attempt pub base_branch: String, // Base branch this attempt is based on pub merge_commit: Option, - pub base_coding_agent: String, // Name of the base coding agent to use ("AMP", "CLAUDE_CODE", + pub profile: String, // Name of the base coding agent to use ("AMP", "CLAUDE_CODE", // "GEMINI", etc.) pub pr_url: Option, // GitHub PR URL pub pr_number: Option, // GitHub PR number @@ -119,7 +119,7 @@ pub struct TaskAttemptContext { #[derive(Debug, Deserialize, TS)] pub struct CreateTaskAttempt { - pub base_coding_agent: String, + pub profile: String, pub base_branch: String, } @@ -142,7 +142,7 @@ impl TaskAttempt { branch, base_branch, merge_commit, - base_coding_agent AS "base_coding_agent!", + profile AS "profile!", pr_url, pr_number, pr_status, @@ -167,7 +167,7 @@ impl TaskAttempt { branch, base_branch, merge_commit, - base_coding_agent AS "base_coding_agent!", + profile AS "profile!", pr_url, pr_number, pr_status, @@ -203,7 +203,7 @@ impl TaskAttempt { ta.branch, ta.base_branch, ta.merge_commit, - ta.base_coding_agent AS "base_coding_agent!", + ta.profile AS "profile!", ta.pr_url, ta.pr_number, ta.pr_status, @@ -298,7 +298,7 @@ impl TaskAttempt { branch, merge_commit, base_branch, - base_coding_agent AS "base_coding_agent!", + profile AS "profile!", pr_url, pr_number, pr_status, @@ -324,7 +324,7 @@ impl TaskAttempt { branch, merge_commit, base_branch, - base_coding_agent AS "base_coding_agent!", + profile AS "profile!", pr_url, pr_number, pr_status, @@ -481,16 +481,16 @@ impl TaskAttempt { // Insert the record into the database Ok(sqlx::query_as!( TaskAttempt, - r#"INSERT INTO task_attempts (id, task_id, container_ref, branch, base_branch, merge_commit, base_coding_agent, 
pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted, setup_completed_at) + r#"INSERT INTO task_attempts (id, task_id, container_ref, branch, base_branch, merge_commit, profile, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted, setup_completed_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) - RETURNING id as "id!: Uuid", task_id as "task_id!: Uuid", container_ref, branch, base_branch, merge_commit, base_coding_agent as "base_coding_agent!", pr_url, pr_number, pr_status, pr_merged_at as "pr_merged_at: DateTime", worktree_deleted as "worktree_deleted!: bool", setup_completed_at as "setup_completed_at: DateTime", created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime""#, + RETURNING id as "id!: Uuid", task_id as "task_id!: Uuid", container_ref, branch, base_branch, merge_commit, profile as "profile!", pr_url, pr_number, pr_status, pr_merged_at as "pr_merged_at: DateTime", worktree_deleted as "worktree_deleted!: bool", setup_completed_at as "setup_completed_at: DateTime", created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime""#, attempt_id, task_id, Option::::None, // Container isn't known yet Option::::None, // branch name isn't known yet data.base_branch, Option::::None, // merge_commit is always None during creation - data.base_coding_agent, + data.profile, Option::::None, // pr_url is None during creation Option::::None, // pr_number is None during creation Option::::None, // pr_status is None during creation diff --git a/crates/executors/Cargo.toml b/crates/executors/Cargo.toml index eff3d8fb..19ac3857 100644 --- a/crates/executors/Cargo.toml +++ b/crates/executors/Cargo.toml @@ -11,10 +11,11 @@ bytes = "1.0" serde = { workspace = true } serde_json = { workspace = true } tracing = { workspace = true } +toml = "0.8" tracing-subscriber = { workspace = true } chrono = { version = "0.4", features = ["serde"] } uuid = { version = "1.0", features = ["v4", "serde"] } -ts-rs = { workspace = 
true } +ts-rs = { workspace = true, features = ["serde-json-impl"]} dirs = "5.0" xdg = "3.0" async-trait = "0.1" @@ -25,13 +26,11 @@ regex = "1.11.1" sentry-tracing = { version = "0.41.0", features = ["backtrace"] } lazy_static = "1.4" json-patch = "2.0" -strum_macros = "0.27.2" thiserror = { workspace = true } enum_dispatch = "0.3.13" futures-io = "0.3.31" tokio-stream = { version = "0.1.17", features = ["io-util"] } futures = "0.3.31" -strum = "0.27.2" bon = "3.6" fork_stream = "0.1.0" os_pipe = "1.2" diff --git a/crates/executors/default_profiles.json b/crates/executors/default_profiles.json new file mode 100644 index 00000000..ac4c96e5 --- /dev/null +++ b/crates/executors/default_profiles.json @@ -0,0 +1,138 @@ +{ + "profiles": [ + { + "label": "claude-code", + "mcp_config_path": null, + "CLAUDE_CODE": { + "command": { + "base": "npx -y @anthropic-ai/claude-code@latest", + "params": [ + "-p", + "--dangerously-skip-permissions", + "--verbose", + "--output-format=stream-json" + ] + }, + "plan": false + }, + "variants": [ + { + "label": "plan", + "mcp_config_path": null, + "CLAUDE_CODE": { + "command": { + "base": "npx -y @anthropic-ai/claude-code@latest", + "params": [ + "-p", + "--permission-mode=plan", + "--verbose", + "--output-format=stream-json" + ] + }, + "plan": true + } + } + ] + }, + { + "label": "claude-code-router", + "mcp_config_path": null, + "CLAUDE_CODE": { + "command": { + "base": "npx -y @musistudio/claude-code-router code", + "params": [ + "-p", + "--dangerously-skip-permissions", + "--verbose", + "--output-format=stream-json" + ] + }, + "plan": false + }, + "variants": [] + }, + { + "label": "amp", + "mcp_config_path": null, + "AMP": { + "command": { + "base": "npx -y @sourcegraph/amp@0.0.1752148945-gd8844f", + "params": [ + "--format=jsonl", + "--dangerously-allow-all" + ] + } + }, + "variants": [] + }, + { + "label": "gemini", + "mcp_config_path": null, + "GEMINI": { + "command": { + "base": "npx -y @google/gemini-cli@latest", + "params": [ + 
"--yolo" + ] + } + }, + "variants": [] + }, + { + "label": "codex", + "mcp_config_path": null, + "CODEX": { + "command": { + "base": "npx -y @openai/codex exec", + "params": [ + "--json", + "--dangerously-bypass-approvals-and-sandbox", + "--skip-git-repo-check" + ] + } + }, + "variants": [] + }, + { + "label": "opencode", + "mcp_config_path": null, + "OPENCODE": { + "command": { + "base": "npx -y opencode-ai@latest run", + "params": [ + "--print-logs" + ] + } + }, + "variants": [] + }, + { + "label": "qwen-code", + "mcp_config_path": "~/.qwen/settings.json", + "GEMINI": { + "command": { + "base": "npx -y @qwen-code/qwen-code@latest", + "params": [ + "--yolo" + ] + } + }, + "variants": [] + }, + { + "label": "cursor", + "mcp_config_path": null, + "CURSOR": { + "command": { + "base": "cursor-agent", + "params": [ + "-p", + "--output-format=stream-json", + "--force" + ] + } + }, + "variants": [] + } + ] +} \ No newline at end of file diff --git a/crates/executors/src/actions/coding_agent_follow_up.rs b/crates/executors/src/actions/coding_agent_follow_up.rs index f38f4491..fbe4fb7f 100644 --- a/crates/executors/src/actions/coding_agent_follow_up.rs +++ b/crates/executors/src/actions/coding_agent_follow_up.rs @@ -8,19 +8,20 @@ use ts_rs::TS; use crate::{ actions::Executable, executors::{CodingAgent, ExecutorError, StandardCodingAgentExecutor}, + profile::ProfileVariantLabel, }; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct CodingAgentFollowUpRequest { pub prompt: String, pub session_id: String, - pub profile: String, + pub profile_variant_label: ProfileVariantLabel, } #[async_trait] impl Executable for CodingAgentFollowUpRequest { async fn spawn(&self, current_dir: &PathBuf) -> Result { - let executor = CodingAgent::from_profile_str(&self.profile)?; + let executor = CodingAgent::from_profile_variant_label(&self.profile_variant_label)?; executor .spawn_follow_up(current_dir, &self.prompt, &self.session_id) .await diff --git 
a/crates/executors/src/actions/coding_agent_initial.rs b/crates/executors/src/actions/coding_agent_initial.rs index 740a6132..8cdc80af 100644 --- a/crates/executors/src/actions/coding_agent_initial.rs +++ b/crates/executors/src/actions/coding_agent_initial.rs @@ -8,18 +8,19 @@ use ts_rs::TS; use crate::{ actions::Executable, executors::{CodingAgent, ExecutorError, StandardCodingAgentExecutor}, + profile::ProfileVariantLabel, }; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct CodingAgentInitialRequest { pub prompt: String, - pub profile: String, + pub profile_variant_label: ProfileVariantLabel, } #[async_trait] impl Executable for CodingAgentInitialRequest { async fn spawn(&self, current_dir: &PathBuf) -> Result { - let executor = CodingAgent::from_profile_str(&self.profile)?; + let executor = CodingAgent::from_profile_variant_label(&self.profile_variant_label)?; executor.spawn(current_dir, &self.prompt).await } } diff --git a/crates/executors/src/actions/mod.rs b/crates/executors/src/actions/mod.rs index fe57c5ce..c84042b4 100644 --- a/crates/executors/src/actions/mod.rs +++ b/crates/executors/src/actions/mod.rs @@ -4,7 +4,6 @@ use async_trait::async_trait; use command_group::AsyncGroupChild; use enum_dispatch::enum_dispatch; use serde::{Deserialize, Serialize}; -use strum_macros::{Display, EnumDiscriminants}; use ts_rs::TS; use crate::{ @@ -19,26 +18,14 @@ pub mod coding_agent_initial; pub mod script; #[enum_dispatch] -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS, EnumDiscriminants, Display)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] #[serde(tag = "type")] -#[strum_discriminants(name(ExecutorActionKind), derive(Display))] pub enum ExecutorActionType { CodingAgentInitialRequest, CodingAgentFollowUpRequest, ScriptRequest, } -impl ExecutorActionType { - /// Get the action type as a string (matches the JSON "type" field) - pub fn action_type(&self) -> &'static str { - match self { - 
ExecutorActionType::CodingAgentInitialRequest(_) => "CodingAgentInitialRequest", - ExecutorActionType::CodingAgentFollowUpRequest(_) => "CodingAgentFollowUpRequest", - ExecutorActionType::ScriptRequest(_) => "ScriptRequest", - } - } -} - #[derive(Debug, Clone, Serialize, Deserialize, TS)] pub struct ExecutorAction { pub typ: ExecutorActionType, @@ -57,10 +44,6 @@ impl ExecutorAction { pub fn next_action(&self) -> Option<&Box> { self.next_action.as_ref() } - - pub fn action_type(&self) -> &'static str { - self.typ.action_type() - } } #[async_trait] diff --git a/crates/executors/src/command.rs b/crates/executors/src/command.rs index 9244a10c..f8cf129a 100644 --- a/crates/executors/src/command.rs +++ b/crates/executors/src/command.rs @@ -1,16 +1,6 @@ -use std::{ - collections::{HashMap, HashSet}, - fs, - sync::OnceLock, -}; - use serde::{Deserialize, Serialize}; use ts_rs::TS; -use crate::executors::BaseCodingAgent; - -static PROFILES_CACHE: OnceLock = OnceLock::new(); - #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct CommandBuilder { /// Base executable command (e.g., "npx -y @anthropic-ai/claude-code@latest") @@ -35,7 +25,6 @@ impl CommandBuilder { self.params = Some(params.into_iter().map(|p| p.into()).collect()); self } - pub fn build_initial(&self) -> String { let mut parts = vec![self.base.clone()]; if let Some(ref params) = self.params { @@ -53,271 +42,3 @@ impl CommandBuilder { parts.join(" ") } } - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] -pub struct AgentProfile { - /// Unique identifier for this profile (e.g., "MyClaudeCode", "FastAmp") - pub label: String, - /// The executor type this profile configures - pub agent: BaseCodingAgent, - /// Command builder configuration - pub command: CommandBuilder, - /// Optional profile-specific MCP config file path (absolute; supports leading ~). 
Overrides the default `BaseCodingAgent` config path - pub mcp_config_path: Option, -} - -impl AgentProfile { - pub fn claude_code() -> Self { - Self { - label: "claude-code".to_string(), - agent: BaseCodingAgent::ClaudeCode, - command: CommandBuilder::new("npx -y @anthropic-ai/claude-code@latest").params(vec![ - "-p", - "--dangerously-skip-permissions", - "--verbose", - "--output-format=stream-json", - ]), - mcp_config_path: None, - } - } - - pub fn claude_code_plan() -> Self { - Self { - label: "claude-code-plan".to_string(), - agent: BaseCodingAgent::ClaudeCode, - command: CommandBuilder::new("npx -y @anthropic-ai/claude-code@latest").params(vec![ - "-p", - "--permission-mode=plan", - "--verbose", - "--output-format=stream-json", - ]), - mcp_config_path: None, - } - } - - pub fn claude_code_router() -> Self { - Self { - label: "claude-code-router".to_string(), - agent: BaseCodingAgent::ClaudeCode, - command: CommandBuilder::new("npx -y @musistudio/claude-code-router code").params( - vec![ - "-p", - "--dangerously-skip-permissions", - "--verbose", - "--output-format=stream-json", - ], - ), - mcp_config_path: None, - } - } - - pub fn amp() -> Self { - Self { - label: "amp".to_string(), - agent: BaseCodingAgent::Amp, - command: CommandBuilder::new("npx -y @sourcegraph/amp@0.0.1752148945-gd8844f") - .params(vec!["--format=jsonl", "--dangerously-allow-all"]), - mcp_config_path: None, - } - } - - pub fn gemini() -> Self { - Self { - label: "gemini".to_string(), - agent: BaseCodingAgent::Gemini, - command: CommandBuilder::new("npx -y @google/gemini-cli@latest").params(vec!["--yolo"]), - mcp_config_path: None, - } - } - - pub fn cursor() -> Self { - Self { - label: "cursor".to_string(), - agent: BaseCodingAgent::Cursor, - command: CommandBuilder::new("cursor-agent").params(vec![ - "-p", - "--output-format=stream-json", - "--force", - ]), - mcp_config_path: None, - } - } - - pub fn codex() -> Self { - Self { - label: "codex".to_string(), - agent: BaseCodingAgent::Codex, - 
command: CommandBuilder::new("npx -y @openai/codex exec").params(vec![ - "--json", - "--dangerously-bypass-approvals-and-sandbox", - "--skip-git-repo-check", - ]), - mcp_config_path: None, - } - } - - pub fn qwen_code() -> Self { - Self { - label: "qwen-code".to_string(), - agent: BaseCodingAgent::Gemini, - command: CommandBuilder::new("npx -y @qwen-code/qwen-code@latest") - .params(vec!["--yolo"]), - mcp_config_path: Some("~/.qwen/settings.json".to_string()), - } - } - - pub fn opencode() -> Self { - Self { - label: "opencode".to_string(), - agent: BaseCodingAgent::Opencode, - command: CommandBuilder::new("npx -y opencode-ai@latest run") - .params(vec!["--print-logs"]), - mcp_config_path: None, - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] -pub struct AgentProfiles { - pub profiles: Vec, -} - -impl AgentProfiles { - pub fn get_cached() -> &'static AgentProfiles { - PROFILES_CACHE.get_or_init(Self::load) - } - - fn load() -> Self { - let mut profiles = Self::from_defaults(); - - if let Err(e) = profiles.extend_from_file() { - if e.kind() != std::io::ErrorKind::NotFound { - tracing::warn!("Failed to load additional profiles: {}", e); - } - } else { - tracing::info!("Loaded additional profiles from profiles.json"); - } - - profiles - } - - pub fn from_defaults() -> Self { - Self { - profiles: vec![ - AgentProfile::claude_code(), - AgentProfile::claude_code_plan(), - AgentProfile::claude_code_router(), - AgentProfile::amp(), - AgentProfile::gemini(), - AgentProfile::codex(), - AgentProfile::opencode(), - AgentProfile::qwen_code(), - AgentProfile::cursor(), - ], - } - } - - pub fn extend_from_file(&mut self) -> Result<(), std::io::Error> { - let profiles_path = utils::assets::profiles_path(); - if !profiles_path.exists() { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - format!("Profiles file not found at {profiles_path:?}"), - )); - } - - let content = fs::read_to_string(&profiles_path)?; - - let user_profiles: Self = 
serde_json::from_str(&content).map_err(|e| { - std::io::Error::new( - std::io::ErrorKind::InvalidData, - format!("Failed to parse profiles.json: {e}"), - ) - })?; - - let default_labels: HashSet = - self.profiles.iter().map(|p| p.label.clone()).collect(); - - // Only add user profiles with unique labels - for user_profile in user_profiles.profiles { - if !default_labels.contains(&user_profile.label) { - self.profiles.push(user_profile); - } else { - tracing::debug!( - "Skipping user profile '{}' - default with same label exists", - user_profile.label - ); - } - } - - Ok(()) - } - - pub fn get_profile(&self, label: &str) -> Option<&AgentProfile> { - self.profiles.iter().find(|p| p.label == label) - } - - pub fn get_profiles_for_agent(&self, agent: &BaseCodingAgent) -> Vec<&AgentProfile> { - self.profiles.iter().filter(|p| &p.agent == agent).collect() - } - - pub fn to_map(&self) -> HashMap { - self.profiles - .iter() - .map(|p| (p.label.clone(), p.clone())) - .collect() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn default_profiles_have_expected_base_and_noninteractive_or_json_flags() { - // Build default profiles and make lookup by label easy - let profiles = AgentProfiles::from_defaults().to_map(); - - let get_profile_command = |label: &str| { - profiles - .get(label) - .map(|p| p.command.build_initial()) - .unwrap_or_else(|| panic!("Profile not found: {label}")) - }; - - let claude_code_command = get_profile_command("claude-code"); - assert!(claude_code_command.contains("npx -y @anthropic-ai/claude-code@latest")); - assert!(claude_code_command.contains("-p")); - assert!(claude_code_command.contains("--dangerously-skip-permissions")); - - let claude_code_router_command = get_profile_command("claude-code-router"); - assert!(claude_code_router_command.contains("npx -y @musistudio/claude-code-router code")); - assert!(claude_code_router_command.contains("-p")); - assert!(claude_code_router_command.contains("--dangerously-skip-permissions")); - 
- let amp_command = get_profile_command("amp"); - assert!(amp_command.contains("npx -y @sourcegraph/amp@0.0.1752148945-gd8844f")); - assert!(amp_command.contains("--format=jsonl")); - - let gemini_command = get_profile_command("gemini"); - assert!(gemini_command.contains("npx -y @google/gemini-cli@latest")); - assert!(gemini_command.contains("--yolo")); - - let codex_command = get_profile_command("codex"); - assert!(codex_command.contains("npx -y @openai/codex exec")); - assert!(codex_command.contains("--json")); - - let qwen_code_command = get_profile_command("qwen-code"); - assert!(qwen_code_command.contains("npx -y @qwen-code/qwen-code@latest")); - assert!(qwen_code_command.contains("--yolo")); - - let opencode_command = get_profile_command("opencode"); - assert!(opencode_command.contains("npx -y opencode-ai@latest run")); - assert!(opencode_command.contains("--print-logs")); - - let cursor_command = get_profile_command("cursor"); - assert!(cursor_command.contains("cursor-agent")); - assert!(cursor_command.contains("-p")); - assert!(cursor_command.contains("--output-format=stream-json")); - } -} diff --git a/crates/executors/src/executors/amp.rs b/crates/executors/src/executors/amp.rs index 0f9ce3ba..e86387ac 100644 --- a/crates/executors/src/executors/amp.rs +++ b/crates/executors/src/executors/amp.rs @@ -10,7 +10,7 @@ use ts_rs::TS; use utils::{msg_store::MsgStore, path::make_path_relative, shell::get_shell_command}; use crate::{ - command::{AgentProfiles, CommandBuilder}, + command::CommandBuilder, executors::{ExecutorError, StandardCodingAgentExecutor}, logs::{ ActionType, EditDiff, NormalizedEntry, NormalizedEntryType, @@ -22,13 +22,7 @@ use crate::{ /// An executor that uses Amp to process tasks #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct Amp { - command_builder: CommandBuilder, -} - -impl Default for Amp { - fn default() -> Self { - Self::new() - } + pub command: CommandBuilder, } #[async_trait] @@ -39,7 +33,7 @@ impl 
StandardCodingAgentExecutor for Amp { prompt: &str, ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); - let amp_command = self.command_builder.build_initial(); + let amp_command = self.command.build_initial(); let mut command = Command::new(shell_cmd); command @@ -70,7 +64,7 @@ impl StandardCodingAgentExecutor for Amp { ) -> Result { // Use shell command for cross-platform compatibility let (shell_cmd, shell_arg) = get_shell_command(); - let amp_command = self.command_builder.build_follow_up(&[ + let amp_command = self.command.build_follow_up(&[ "threads".to_string(), "continue".to_string(), session_id.to_string(), @@ -203,20 +197,6 @@ impl StandardCodingAgentExecutor for Amp { } } -impl Amp { - pub fn new() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("amp") - .expect("Default amp profile should exist"); - - Self::with_command_builder(profile.command.clone()) - } - - pub fn with_command_builder(command_builder: CommandBuilder) -> Self { - Self { command_builder } - } -} - #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(tag = "type")] pub enum AmpJson { diff --git a/crates/executors/src/executors/claude.rs b/crates/executors/src/executors/claude.rs index 14d7a255..14fefae4 100644 --- a/crates/executors/src/executors/claude.rs +++ b/crates/executors/src/executors/claude.rs @@ -11,7 +11,7 @@ use utils::{ }; use crate::{ - command::{AgentProfiles, CommandBuilder}, + command::CommandBuilder, executors::{ExecutorError, StandardCodingAgentExecutor}, logs::{ ActionType, EditDiff, NormalizedEntry, NormalizedEntryType, @@ -23,14 +23,8 @@ use crate::{ /// An executor that uses Claude CLI to process tasks #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct ClaudeCode { - executor_type: String, - command_builder: CommandBuilder, -} - -impl Default for ClaudeCode { - fn default() -> Self { - Self::new() - } + pub command: CommandBuilder, + pub plan: bool, } #[async_trait] @@ -41,7 +35,12 @@ impl 
StandardCodingAgentExecutor for ClaudeCode { prompt: &str, ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); - let claude_command = self.command_builder.build_initial(); + let claude_command = if self.plan { + let base_command = self.command.build_initial(); + create_watchkill_script(&base_command) + } else { + self.command.build_initial() + }; let mut command = Command::new(shell_cmd); command @@ -72,9 +71,15 @@ impl StandardCodingAgentExecutor for ClaudeCode { ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); // Build follow-up command with --resume {session_id} - let claude_command = self - .command_builder - .build_follow_up(&["--resume".to_string(), session_id.to_string()]); + let claude_command = if self.plan { + let base_command = self + .command + .build_follow_up(&["--resume".to_string(), session_id.to_string()]); + create_watchkill_script(&base_command) + } else { + self.command + .build_follow_up(&["--resume".to_string(), session_id.to_string()]) + }; let mut command = Command::new(shell_cmd); command @@ -113,50 +118,6 @@ impl StandardCodingAgentExecutor for ClaudeCode { } } -impl ClaudeCode { - /// Create a new Claude executor with default settings - pub fn new() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("claude-code") - .expect("Default claude-code profile should exist"); - - Self::with_command_builder(profile.label.clone(), profile.command.clone()) - } - - /// Create a new Claude executor in plan mode with watchkill script - pub fn new_plan_mode() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("claude-code-plan") - .expect("Default claude-code-plan profile should exist"); - - let base_command = profile.command.build_initial(); - // Note: We'll need to update this to handle watchkill script properly - // For now, we'll create a custom command builder - let watchkill_command = create_watchkill_script(&base_command); - Self { - executor_type: "ClaudePlan".to_string(), - 
command_builder: CommandBuilder::new(watchkill_command), - } - } - - /// Create a new Claude executor using claude-code-router - pub fn new_claude_code_router() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("claude-code-router") - .expect("Default claude-code-router profile should exist"); - - Self::with_command_builder(profile.label.clone(), profile.command.clone()) - } - - /// Create a new Claude executor with custom command builder - pub fn with_command_builder(executor_type: String, command_builder: CommandBuilder) -> Self { - Self { - executor_type, - command_builder, - } - } -} - fn create_watchkill_script(command: &str) -> String { let claude_plan_stop_indicator = concat!("Exit ", "plan mode?"); // Use concat!() as a workaround to avoid killing plan mode when this file is read. format!( @@ -1009,7 +970,10 @@ mod tests { use utils::msg_store::MsgStore; - let executor = ClaudeCode::new(); + let executor = ClaudeCode { + command: CommandBuilder::new(""), + plan: false, + }; let msg_store = Arc::new(MsgStore::new()); let current_dir = std::path::PathBuf::from("/tmp/test-worktree"); @@ -1109,23 +1073,4 @@ mod tests { // ToolResult entry is ignored - no third entry } - - #[test] - fn test_claude_executor_command_building() { - // Test default executor produces correct command - let executor = ClaudeCode::new(); - let command = executor.command_builder.build_initial(); - assert!(command.contains("npx -y @anthropic-ai/claude-code@latest")); - assert!(command.contains("-p")); - assert!(command.contains("--dangerously-skip-permissions")); - assert!(command.contains("--verbose")); - assert!(command.contains("--output-format=stream-json")); - - // Test follow-up command - let follow_up = executor - .command_builder - .build_follow_up(&["--resume".to_string(), "test-session-123".to_string()]); - assert!(follow_up.contains("--resume test-session-123")); - assert!(follow_up.contains("-p")); // Still contains base params - } } diff --git 
a/crates/executors/src/executors/codex.rs b/crates/executors/src/executors/codex.rs index 78441036..29a09e3c 100644 --- a/crates/executors/src/executors/codex.rs +++ b/crates/executors/src/executors/codex.rs @@ -10,7 +10,7 @@ use ts_rs::TS; use utils::{msg_store::MsgStore, path::make_path_relative, shell::get_shell_command}; use crate::{ - command::{AgentProfiles, CommandBuilder}, + command::CommandBuilder, executors::{ExecutorError, StandardCodingAgentExecutor}, logs::{ ActionType, EditDiff, NormalizedEntry, NormalizedEntryType, @@ -102,29 +102,7 @@ impl SessionHandler { /// An executor that uses Codex CLI to process tasks #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct Codex { - command_builder: CommandBuilder, -} - -impl Default for Codex { - fn default() -> Self { - Self::new() - } -} - -impl Codex { - /// Create a new Codex executor with default settings - pub fn new() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("codex") - .expect("Default codex profile should exist"); - - Self::with_command_builder(profile.command.clone()) - } - - /// Create a new Codex executor with custom command builder - pub fn with_command_builder(command_builder: CommandBuilder) -> Self { - Self { command_builder } - } + pub command: CommandBuilder, } #[async_trait] @@ -135,7 +113,7 @@ impl StandardCodingAgentExecutor for Codex { prompt: &str, ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); - let codex_command = self.command_builder.build_initial(); + let codex_command = self.command.build_initial(); let mut command = Command::new(shell_cmd); command @@ -173,7 +151,7 @@ impl StandardCodingAgentExecutor for Codex { })?; let (shell_cmd, shell_arg) = get_shell_command(); - let codex_command = self.command_builder.build_follow_up(&[ + let codex_command = self.command.build_follow_up(&[ "-c".to_string(), format!("experimental_resume={}", rollout_file_path.display()), ]); diff --git a/crates/executors/src/executors/cursor.rs 
b/crates/executors/src/executors/cursor.rs index 48d3b6f5..ef551690 100644 --- a/crates/executors/src/executors/cursor.rs +++ b/crates/executors/src/executors/cursor.rs @@ -10,7 +10,7 @@ use ts_rs::TS; use utils::{msg_store::MsgStore, path::make_path_relative, shell::get_shell_command}; use crate::{ - command::{AgentProfiles, CommandBuilder}, + command::CommandBuilder, executors::{ExecutorError, StandardCodingAgentExecutor}, logs::{ ActionType, EditDiff, NormalizedEntry, NormalizedEntryType, @@ -22,29 +22,7 @@ use crate::{ /// Executor for running Cursor CLI and normalizing its JSONL stream #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct Cursor { - command_builder: CommandBuilder, -} - -impl Default for Cursor { - fn default() -> Self { - Self::new() - } -} - -impl Cursor { - /// Create a new Cursor executor with default profile - pub fn new() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("cursor") - .expect("Default cursor profile should exist"); - - Self::with_command_builder(profile.command.clone()) - } - - /// Create a new Cursor executor with custom command builder - pub fn with_command_builder(command_builder: CommandBuilder) -> Self { - Self { command_builder } - } + pub command: CommandBuilder, } #[async_trait] @@ -55,7 +33,7 @@ impl StandardCodingAgentExecutor for Cursor { prompt: &str, ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); - let agent_cmd = self.command_builder.build_initial(); + let agent_cmd = self.command.build_initial(); let mut command = Command::new(shell_cmd); command @@ -85,7 +63,7 @@ impl StandardCodingAgentExecutor for Cursor { ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); let agent_cmd = self - .command_builder + .command .build_follow_up(&["--resume".to_string(), session_id.to_string()]); let mut command = Command::new(shell_cmd); @@ -803,7 +781,9 @@ mod tests { #[tokio::test] async fn test_cursor_streaming_patch_generation() { // Avoid relying on 
feature flag in tests; construct with a dummy command - let executor = Cursor::with_command_builder(CommandBuilder::new("cursor-agent")); + let executor = Cursor { + command: CommandBuilder::new(""), + }; let msg_store = Arc::new(MsgStore::new()); let current_dir = std::path::PathBuf::from("/tmp/test-worktree"); diff --git a/crates/executors/src/executors/gemini.rs b/crates/executors/src/executors/gemini.rs index b3c44237..b6543a62 100644 --- a/crates/executors/src/executors/gemini.rs +++ b/crates/executors/src/executors/gemini.rs @@ -6,14 +6,14 @@ use futures::{StreamExt, stream::BoxStream}; use serde::{Deserialize, Serialize}; use tokio::{ fs::{self, OpenOptions}, - io::{AsyncReadExt, AsyncWriteExt}, + io::AsyncWriteExt, process::Command, }; use ts_rs::TS; use utils::{msg_store::MsgStore, shell::get_shell_command}; use crate::{ - command::{AgentProfiles, CommandBuilder}, + command::CommandBuilder, executors::{ExecutorError, StandardCodingAgentExecutor}, logs::{ NormalizedEntry, NormalizedEntryType, plain_text_processor::PlainTextLogProcessor, @@ -25,13 +25,7 @@ use crate::{ /// An executor that uses Gemini to process tasks #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct Gemini { - command_builder: CommandBuilder, -} - -impl Default for Gemini { - fn default() -> Self { - Self::new() - } + pub command: CommandBuilder, } #[async_trait] @@ -42,7 +36,7 @@ impl StandardCodingAgentExecutor for Gemini { prompt: &str, ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); - let gemini_command = self.command_builder.build_initial(); + let gemini_command = self.command.build_initial(); let mut command = Command::new(shell_cmd); @@ -86,7 +80,7 @@ impl StandardCodingAgentExecutor for Gemini { let followup_prompt = Self::build_followup_prompt(current_dir, prompt).await?; let (shell_cmd, shell_arg) = get_shell_command(); - let gemini_command = self.command_builder.build_follow_up(&[]); + let gemini_command = self.command.build_follow_up(&[]); 
let mut command = Command::new(shell_cmd); @@ -313,19 +307,3 @@ You are continuing work on the above task. The execution history shows the previ utils::path::get_vibe_kanban_temp_dir().join("gemini_sessions") } } - -impl Gemini { - /// Create a new Gemini executor with default settings - pub fn new() -> Self { - let profile = AgentProfiles::get_cached() - .get_profile("gemini") - .expect("Default gemini profile should exist"); - - Self::with_command_builder(profile.command.clone()) - } - - /// Create a new Gemini executor with custom command builder - pub fn with_command_builder(command_builder: CommandBuilder) -> Self { - Self { command_builder } - } -} diff --git a/crates/executors/src/executors/mod.rs b/crates/executors/src/executors/mod.rs index 01d83938..b6ba9487 100644 --- a/crates/executors/src/executors/mod.rs +++ b/crates/executors/src/executors/mod.rs @@ -5,17 +5,17 @@ use command_group::AsyncGroupChild; use enum_dispatch::enum_dispatch; use futures_io::Error as FuturesIoError; use serde::{Deserialize, Serialize}; -use strum_macros::EnumDiscriminants; use thiserror::Error; use ts_rs::TS; use utils::msg_store::MsgStore; use crate::{ - command::AgentProfiles, executors::{ amp::Amp, claude::ClaudeCode, codex::Codex, cursor::Cursor, gemini::Gemini, opencode::Opencode, }, + mcp_config::McpConfig, + profile::{ProfileConfigs, ProfileVariantLabel}, }; pub mod amp; @@ -35,122 +35,118 @@ pub enum ExecutorError { UnknownExecutorType(String), #[error("I/O error: {0}")] Io(std::io::Error), -} - -fn unknown_executor_error(s: &str) -> ExecutorError { - ExecutorError::UnknownExecutorType(format!("Unknown executor type: {s}.")) + #[error(transparent)] + Json(#[from] serde_json::Error), + #[error(transparent)] + TomlSerialize(#[from] toml::ser::Error), + #[error(transparent)] + TomlDeserialize(#[from] toml::de::Error), } #[enum_dispatch] -#[derive( - Debug, Clone, Serialize, Deserialize, PartialEq, TS, EnumDiscriminants, strum_macros::EnumString, -)] +#[derive(Debug, 
Clone, Serialize, Deserialize, PartialEq, TS)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] -#[strum(serialize_all = "SCREAMING_SNAKE_CASE")] -#[strum(parse_err_ty = ExecutorError, parse_err_fn = unknown_executor_error)] -#[strum_discriminants( - name(BaseCodingAgent), - derive(strum_macros::Display, Serialize, Deserialize, TS), - strum(serialize_all = "SCREAMING_SNAKE_CASE"), - ts(use_ts_enum), - serde(rename_all = "SCREAMING_SNAKE_CASE") -)] pub enum CodingAgent { - // Echo, - #[serde(alias = "claude")] ClaudeCode, - // ClaudePlan, Amp, Gemini, Codex, - Cursor, - // ClaudeCodeRouter, Opencode, - // Aider, + Cursor, } impl CodingAgent { - /// Create an executor from a profile string - /// Handles both default profiles ("claude-code", "amp", "gemini") and custom profiles - pub fn from_profile_str(profile: &str) -> Result { - match profile { - "claude-code" => Ok(CodingAgent::ClaudeCode(ClaudeCode::new())), - "claude-code-plan" => Ok(CodingAgent::ClaudeCode(ClaudeCode::new_plan_mode())), - "claude-code-router" => { - Ok(CodingAgent::ClaudeCode(ClaudeCode::new_claude_code_router())) - } - "amp" => Ok(CodingAgent::Amp(Amp::new())), - "gemini" => Ok(CodingAgent::Gemini(Gemini::new())), - "codex" => Ok(CodingAgent::Codex(Codex::new())), - "cursor" => Ok(CodingAgent::Cursor(Cursor::new())), - "opencode" => Ok(CodingAgent::Opencode(Opencode::new())), - _ => { - // Try to load from AgentProfiles - if let Some(agent_profile) = AgentProfiles::get_cached().get_profile(profile) { - match agent_profile.agent { - BaseCodingAgent::ClaudeCode => { - Ok(CodingAgent::ClaudeCode(ClaudeCode::with_command_builder( - profile.to_string(), - agent_profile.command.clone(), - ))) - } - BaseCodingAgent::Amp => Ok(CodingAgent::Amp(Amp::with_command_builder( - agent_profile.command.clone(), - ))), - BaseCodingAgent::Gemini => Ok(CodingAgent::Gemini( - Gemini::with_command_builder(agent_profile.command.clone()), - )), - BaseCodingAgent::Codex => Ok(CodingAgent::Codex( - 
Codex::with_command_builder(agent_profile.command.clone()), - )), - BaseCodingAgent::Opencode => Ok(CodingAgent::Opencode( - Opencode::with_command_builder(agent_profile.command.clone()), - )), - BaseCodingAgent::Cursor => Ok(CodingAgent::Cursor( - Cursor::with_command_builder(agent_profile.command.clone()), - )), - } + /// Create a CodingAgent from a profile variant + /// Loads profile from AgentProfiles (both default and custom profiles) + pub fn from_profile_variant_label( + profile_variant_label: &ProfileVariantLabel, + ) -> Result { + if let Some(profile_config) = + ProfileConfigs::get_cached().get_profile(&profile_variant_label.profile) + { + if let Some(variant_name) = &profile_variant_label.variant { + if let Some(variant) = profile_config.get_variant(&variant_name) { + Ok(variant.agent.clone()) } else { Err(ExecutorError::UnknownExecutorType(format!( - "Unknown profile: {profile}" + "Unknown mode: {}", + variant_name ))) } + } else { + Ok(profile_config.default.agent.clone()) } - } - } -} - -impl BaseCodingAgent { - /// Get the JSON attribute path for MCP servers in the config file - /// Returns None if the executor doesn't support MCP - pub fn mcp_attribute_path(&self) -> Option> { - match self { - //ExecutorConfig::CharmOpencode => Some(vec!["mcpServers"]), - Self::Opencode => Some(vec!["mcp"]), - Self::ClaudeCode => Some(vec!["mcpServers"]), - //ExecutorConfig::ClaudePlan => None, // Claude Plan shares Claude config - Self::Amp => Some(vec!["amp", "mcpServers"]), // Nested path for Amp - Self::Gemini => Some(vec!["mcpServers"]), - //ExecutorConfig::Aider => None, // Aider doesn't support MCP. 
https://github.com/Aider-AI/aider/issues/3314 - Self::Codex => Some(vec!["mcp_servers"]), // Codex uses TOML with mcp_servers - // Cursor CLI is supposed to be compatible with MCP server config according to the docs: https://docs.cursor.com/en/cli/using#mcp - // But it still doesn't seem to support it properly: https://forum.cursor.com/t/cursor-cli-not-actually-an-mcp-client/127000/5 - Self::Cursor => Some(vec!["mcpServers"]), + } else { + Err(ExecutorError::UnknownExecutorType(format!( + "Unknown profile: {}", + profile_variant_label.profile + ))) } } pub fn supports_mcp(&self) -> bool { - self.mcp_attribute_path().is_some() + self.default_mcp_config_path().is_some() } - pub fn config_path(&self) -> Option { + pub fn get_mcp_config(&self) -> McpConfig { + match self { + Self::Codex(_) => McpConfig::new( + vec!["mcp_servers".to_string()], + serde_json::json!({ + "mcp_servers": {} + }), + serde_json::json!({ + "command": "npx", + "args": ["-y", "vibe-kanban", "--mcp"], + }), + true, + ), + Self::Amp(_) => McpConfig::new( + vec!["amp.mcpServers".to_string()], + serde_json::json!({ + "amp.mcpServers": {} + }), + serde_json::json!({ + "command": "npx", + "args": ["-y", "vibe-kanban", "--mcp"], + }), + false, + ), + Self::Opencode(_) => McpConfig::new( + vec!["mcp".to_string()], + serde_json::json!({ + "mcp": {}, + "$schema": "https://opencode.ai/config.json" + }), + serde_json::json!({ + "type": "local", + "command": ["npx", "-y", "vibe-kanban", "--mcp"], + "enabled": true + }), + false, + ), + _ => McpConfig::new( + vec!["mcpServers".to_string()], + serde_json::json!({ + "mcpServers": {} + }), + serde_json::json!({ + "command": "npx", + "args": ["-y", "vibe-kanban", "--mcp"], + }), + false, + ), + } + } + + pub fn default_mcp_config_path(&self) -> Option { match self { //ExecutorConfig::CharmOpencode => { //dirs::home_dir().map(|home| home.join(".opencode.json")) //} - Self::ClaudeCode => dirs::home_dir().map(|home| home.join(".claude.json")), + Self::ClaudeCode(_) => 
dirs::home_dir().map(|home| home.join(".claude.json")), //ExecutorConfig::ClaudePlan => dirs::home_dir().map(|home| home.join(".claude.json")), - Self::Opencode => { + Self::Opencode(_) => { #[cfg(unix)] { xdg::BaseDirectories::with_prefix("opencode").get_config_file("opencode.json") @@ -161,10 +157,14 @@ impl BaseCodingAgent { } } //ExecutorConfig::Aider => None, - Self::Codex => dirs::home_dir().map(|home| home.join(".codex").join("config.toml")), - Self::Amp => dirs::config_dir().map(|config| config.join("amp").join("settings.json")), - Self::Gemini => dirs::home_dir().map(|home| home.join(".gemini").join("settings.json")), - Self::Cursor => dirs::home_dir().map(|home| home.join(".cursor").join("mcp.json")), + Self::Codex(_) => dirs::home_dir().map(|home| home.join(".codex").join("config.toml")), + Self::Amp(_) => { + dirs::config_dir().map(|config| config.join("amp").join("settings.json")) + } + Self::Gemini(_) => { + dirs::home_dir().map(|home| home.join(".gemini").join("settings.json")) + } + Self::Cursor(_) => dirs::home_dir().map(|home| home.join(".cursor").join("mcp.json")), } } } diff --git a/crates/executors/src/executors/opencode.rs b/crates/executors/src/executors/opencode.rs index 5b68f4a7..b696c1ef 100644 --- a/crates/executors/src/executors/opencode.rs +++ b/crates/executors/src/executors/opencode.rs @@ -12,7 +12,7 @@ use ts_rs::TS; use utils::{msg_store::MsgStore, path::make_path_relative, shell::get_shell_command}; use crate::{ - command::{AgentProfiles, CommandBuilder}, + command::CommandBuilder, executors::{ExecutorError, StandardCodingAgentExecutor}, logs::{ ActionType, EditDiff, NormalizedEntry, NormalizedEntryType, @@ -24,27 +24,7 @@ use crate::{ /// An executor that uses OpenCode to process tasks #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] pub struct Opencode { - command_builder: CommandBuilder, -} - -impl Default for Opencode { - fn default() -> Self { - Self::new() - } -} - -impl Opencode { - pub fn new() -> Self { - let 
profile = AgentProfiles::get_cached() - .get_profile("opencode") - .expect("Default opencode profile should exist"); - - Self::with_command_builder(profile.command.clone()) - } - - pub fn with_command_builder(command_builder: CommandBuilder) -> Self { - Self { command_builder } - } + pub command: CommandBuilder, } #[async_trait] @@ -55,7 +35,7 @@ impl StandardCodingAgentExecutor for Opencode { prompt: &str, ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); - let opencode_command = self.command_builder.build_initial(); + let opencode_command = self.command.build_initial(); let mut command = Command::new(shell_cmd); command @@ -87,7 +67,7 @@ impl StandardCodingAgentExecutor for Opencode { ) -> Result { let (shell_cmd, shell_arg) = get_shell_command(); let opencode_command = self - .command_builder + .command .build_follow_up(&["--session".to_string(), session_id.to_string()]); let mut command = Command::new(shell_cmd); diff --git a/crates/executors/src/lib.rs b/crates/executors/src/lib.rs index 46831052..61e04c21 100644 --- a/crates/executors/src/lib.rs +++ b/crates/executors/src/lib.rs @@ -2,4 +2,6 @@ pub mod actions; pub mod command; pub mod executors; pub mod logs; +pub mod mcp_config; +pub mod profile; pub mod stdout_dup; diff --git a/crates/executors/src/mcp_config.rs b/crates/executors/src/mcp_config.rs new file mode 100644 index 00000000..9ba93998 --- /dev/null +++ b/crates/executors/src/mcp_config.rs @@ -0,0 +1,81 @@ +//! Utilities for reading and writing external agent config files (not the server's own config). +//! +//! These helpers abstract over JSON vs TOML formats used by different agents. 
+ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use tokio::fs; +use ts_rs::TS; + +use crate::executors::ExecutorError; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +pub struct McpConfig { + servers: HashMap, + pub servers_path: Vec, + pub template: serde_json::Value, + pub vibe_kanban: serde_json::Value, + pub is_toml_config: bool, +} + +impl McpConfig { + pub fn new( + servers_path: Vec, + template: serde_json::Value, + vibe_kanban: serde_json::Value, + is_toml_config: bool, + ) -> Self { + Self { + servers: HashMap::new(), + servers_path, + template, + vibe_kanban, + is_toml_config, + } + } + pub fn set_servers(&mut self, servers: HashMap) { + self.servers = servers; + } +} + +/// Read an agent's external config file (JSON or TOML) and normalize it to serde_json::Value. +pub async fn read_agent_config( + config_path: &std::path::Path, + mcp_config: &McpConfig, +) -> Result { + if let Ok(file_content) = fs::read_to_string(config_path).await { + if mcp_config.is_toml_config { + // Parse TOML then convert to JSON Value + if file_content.trim().is_empty() { + return Ok(serde_json::json!({})); + } + let toml_val: toml::Value = toml::from_str(&file_content)?; + let json_string = serde_json::to_string(&toml_val)?; + Ok(serde_json::from_str(&json_string)?) + } else { + Ok(serde_json::from_str(&file_content)?) + } + } else { + Ok(mcp_config.template.clone()) + } +} + +/// Write an agent's external config (as serde_json::Value) back to disk in the agent's format (JSON or TOML). 
+pub async fn write_agent_config( + config_path: &std::path::Path, + mcp_config: &McpConfig, + config: &Value, +) -> Result<(), ExecutorError> { + if mcp_config.is_toml_config { + // Convert JSON Value back to TOML + let toml_value: toml::Value = serde_json::from_str(&serde_json::to_string(config)?)?; + let toml_content = toml::to_string_pretty(&toml_value)?; + fs::write(config_path, toml_content).await?; + } else { + let json_content = serde_json::to_string_pretty(config)?; + fs::write(config_path, json_content).await?; + } + Ok(()) +} diff --git a/crates/executors/src/profile.rs b/crates/executors/src/profile.rs new file mode 100644 index 00000000..3cd9e401 --- /dev/null +++ b/crates/executors/src/profile.rs @@ -0,0 +1,288 @@ +use std::{ + collections::{HashMap, HashSet}, + fs, + path::PathBuf, + sync::RwLock, +}; + +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::executors::CodingAgent; + +lazy_static! { + static ref PROFILES_CACHE: RwLock = RwLock::new(ProfileConfigs::load()); +} + +// Default profiels embedded at compile time +const DEFAULT_PROFILES_JSON: &str = include_str!("../default_profiles.json"); + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] +pub struct VariantAgentConfig { + /// Unique identifier for this profile (e.g., "MyClaudeCode", "FastAmp") + pub label: String, + /// The coding agent this profile is associated with + #[serde(flatten)] + pub agent: CodingAgent, + /// Optional profile-specific MCP config file path (absolute; supports leading ~). Overrides the default `BaseCodingAgent` config path + pub mcp_config_path: Option, +} +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] +pub struct ProfileConfig { + #[serde(flatten)] + /// default profile variant + pub default: VariantAgentConfig, + /// additional variants for this profile, e.g. 
plan, review, subagent + pub variants: Vec, +} + +impl ProfileConfig { + pub fn get_variant(&self, variant: &str) -> Option<&VariantAgentConfig> { + self.variants.iter().find(|m| m.label == variant) + } + + pub fn get_mcp_config_path(&self) -> Option { + match self.default.mcp_config_path.as_ref() { + Some(path) => Some(PathBuf::from(path)), + None => self.default.agent.default_mcp_config_path(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] +pub struct ProfileVariantLabel { + pub profile: String, + pub variant: Option, +} + +impl ProfileVariantLabel { + pub fn default(profile: String) -> Self { + Self { + profile, + variant: None, + } + } + pub fn with_variant(profile: String, mode: String) -> Self { + Self { + profile, + variant: Some(mode), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)] +pub struct ProfileConfigs { + pub profiles: Vec, +} + +impl ProfileConfigs { + pub fn get_cached() -> ProfileConfigs { + PROFILES_CACHE.read().unwrap().clone() + } + + pub fn reload() { + let mut cache = PROFILES_CACHE.write().unwrap(); + *cache = Self::load(); + } + + fn load() -> Self { + let profiles_path = utils::assets::profiles_path(); + + // load from profiles.json if it exists, otherwise use defaults + let content = match fs::read_to_string(&profiles_path) { + Ok(content) => content, + Err(e) => { + tracing::warn!("Failed to read profiles.json: {}, using defaults", e); + return Self::from_defaults(); + } + }; + + match serde_json::from_str::(&content) { + Ok(profiles) => { + tracing::info!("Loaded all profiles from profiles.json"); + profiles + } + Err(e) => { + tracing::warn!("Failed to parse profiles.json: {}, using defaults", e); + Self::from_defaults() + } + } + } + + pub fn from_defaults() -> Self { + serde_json::from_str(DEFAULT_PROFILES_JSON).unwrap_or_else(|e| { + tracing::error!("Failed to parse embedded default_profiles.json: {}", e); + panic!("Default profiles JSON is invalid") + }) + } + + pub fn 
extend_from_file(&mut self) -> Result<(), std::io::Error> { + let profiles_path = utils::assets::profiles_path(); + if !profiles_path.exists() { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + format!("Profiles file not found at {profiles_path:?}"), + )); + } + + let content = fs::read_to_string(&profiles_path)?; + + let user_profiles: Self = serde_json::from_str(&content).map_err(|e| { + std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Failed to parse profiles.json: {e}"), + ) + })?; + + let default_labels: HashSet = self + .profiles + .iter() + .map(|p| p.default.label.clone()) + .collect(); + + // Only add user profiles with unique labels + for user_profile in user_profiles.profiles { + if !default_labels.contains(&user_profile.default.label) { + self.profiles.push(user_profile); + } else { + tracing::debug!( + "Skipping user profile '{}' - default with same label exists", + user_profile.default.label + ); + } + } + + Ok(()) + } + + pub fn get_profile(&self, label: &str) -> Option<&ProfileConfig> { + self.profiles.iter().find(|p| p.default.label == label) + } + + pub fn to_map(&self) -> HashMap { + self.profiles + .iter() + .map(|p| (p.default.label.clone(), p.clone())) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn default_profiles_have_expected_base_and_noninteractive_or_json_flags() { + // Build default profiles and make lookup by label easy + let profiles = ProfileConfigs::from_defaults().to_map(); + + let get_profile_command = |label: &str| { + profiles + .get(label) + .map(|p| { + use crate::executors::CodingAgent; + match &p.default.agent { + CodingAgent::ClaudeCode(claude) => claude.command.build_initial(), + CodingAgent::Amp(amp) => amp.command.build_initial(), + CodingAgent::Gemini(gemini) => gemini.command.build_initial(), + CodingAgent::Codex(codex) => codex.command.build_initial(), + CodingAgent::Opencode(opencode) => opencode.command.build_initial(), + 
CodingAgent::Cursor(cursor) => cursor.command.build_initial(), + } + }) + .unwrap_or_else(|| panic!("Profile not found: {label}")) + }; + let profiles = ProfileConfigs::from_defaults(); + assert!(profiles.profiles.len() == 8); + + let claude_code_command = get_profile_command("claude-code"); + assert!(claude_code_command.contains("npx -y @anthropic-ai/claude-code@latest")); + assert!(claude_code_command.contains("-p")); + assert!(claude_code_command.contains("--dangerously-skip-permissions")); + + let claude_code_router_command = get_profile_command("claude-code-router"); + assert!(claude_code_router_command.contains("npx -y @musistudio/claude-code-router code")); + assert!(claude_code_router_command.contains("-p")); + assert!(claude_code_router_command.contains("--dangerously-skip-permissions")); + + let amp_command = get_profile_command("amp"); + assert!(amp_command.contains("npx -y @sourcegraph/amp@0.0.1752148945-gd8844f")); + assert!(amp_command.contains("--format=jsonl")); + + let gemini_command = get_profile_command("gemini"); + assert!(gemini_command.contains("npx -y @google/gemini-cli@latest")); + assert!(gemini_command.contains("--yolo")); + + let codex_command = get_profile_command("codex"); + assert!(codex_command.contains("npx -y @openai/codex exec")); + assert!(codex_command.contains("--json")); + + let qwen_code_command = get_profile_command("qwen-code"); + assert!(qwen_code_command.contains("npx -y @qwen-code/qwen-code@latest")); + assert!(qwen_code_command.contains("--yolo")); + + let opencode_command = get_profile_command("opencode"); + assert!(opencode_command.contains("npx -y opencode-ai@latest run")); + assert!(opencode_command.contains("--print-logs")); + + let cursor_command = get_profile_command("cursor"); + assert!(cursor_command.contains("cursor-agent")); + assert!(cursor_command.contains("-p")); + assert!(cursor_command.contains("--output-format=stream-json")); + } + + #[test] + fn test_flattened_agent_deserialization() { + let test_json = 
r#"{ + "profiles": [ + { + "label": "test-claude", + "mcp_config_path": null, + "CLAUDE_CODE": { + "command": { + "base": "npx claude", + "params": ["--test"] + }, + "plan": true + }, + "variants": [] + }, + { + "label": "test-gemini", + "mcp_config_path": null, + "GEMINI": { + "command": { + "base": "npx gemini", + "params": ["--test"] + } + }, + "variants": [] + } + ] + }"#; + + let profiles: ProfileConfigs = serde_json::from_str(test_json).expect("Should deserialize"); + assert_eq!(profiles.profiles.len(), 2); + + // Test Claude profile + let claude_profile = profiles.get_profile("test-claude").unwrap(); + match &claude_profile.default.agent { + crate::executors::CodingAgent::ClaudeCode(claude) => { + assert_eq!(claude.command.base, "npx claude"); + assert_eq!(claude.command.params.as_ref().unwrap()[0], "--test"); + assert_eq!(claude.plan, true); + } + _ => panic!("Expected ClaudeCode agent"), + } + + // Test Gemini profile + let gemini_profile = profiles.get_profile("test-gemini").unwrap(); + match &gemini_profile.default.agent { + crate::executors::CodingAgent::Gemini(gemini) => { + assert_eq!(gemini.command.base, "npx gemini"); + assert_eq!(gemini.command.params.as_ref().unwrap()[0], "--test"); + } + _ => panic!("Expected Gemini agent"), + } + } +} diff --git a/crates/server/Cargo.toml b/crates/server/Cargo.toml index 15abd463..eba0d403 100644 --- a/crates/server/Cargo.toml +++ b/crates/server/Cargo.toml @@ -25,7 +25,7 @@ tracing-subscriber = { workspace = true } sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "sqlite", "chrono", "uuid"] } chrono = { version = "0.4", features = ["serde"] } uuid = { version = "1.0", features = ["v4", "serde"] } -ts-rs = { workspace = true } +ts-rs = { workspace = true, features = ["serde-json-impl"]} async-trait = "0.1" command-group = { version = "5.0", features = ["with-tokio"] } nix = { version = "0.29", features = ["signal", "process"] } diff --git a/crates/server/src/bin/generate_types.rs 
b/crates/server/src/bin/generate_types.rs index 73c929fd..38066892 100644 --- a/crates/server/src/bin/generate_types.rs +++ b/crates/server/src/bin/generate_types.rs @@ -17,6 +17,12 @@ fn generate_types_content() -> String { db::models::project::UpdateProject::decl(), db::models::project::SearchResult::decl(), db::models::project::SearchMatchType::decl(), + executors::actions::ExecutorAction::decl(), + executors::mcp_config::McpConfig::decl(), + executors::actions::ExecutorActionType::decl(), + executors::actions::script::ScriptContext::decl(), + executors::actions::script::ScriptRequest::decl(), + executors::actions::script::ScriptRequestLanguage::decl(), db::models::task_template::TaskTemplate::decl(), db::models::task_template::CreateTaskTemplate::decl(), db::models::task_template::UpdateTaskTemplate::decl(), @@ -28,6 +34,9 @@ fn generate_types_content() -> String { utils::response::ApiResponse::<()>::decl(), server::routes::config::UserSystemInfo::decl(), server::routes::config::Environment::decl(), + server::routes::config::McpServerQuery::decl(), + server::routes::config::UpdateMcpServersBody::decl(), + server::routes::config::GetMcpServerResponse::decl(), server::routes::task_attempts::CreateFollowUpAttempt::decl(), server::routes::task_attempts::CreateGitHubPrRequest::decl(), services::services::github_service::GitHubServiceError::decl(), @@ -46,17 +55,23 @@ fn generate_types_content() -> String { utils::diff::Diff::decl(), utils::diff::FileDiffDetails::decl(), services::services::github_service::RepositoryInfo::decl(), - executors::executors::BaseCodingAgent::decl(), executors::command::CommandBuilder::decl(), - executors::command::AgentProfile::decl(), - executors::command::AgentProfiles::decl(), + executors::profile::ProfileVariantLabel::decl(), + executors::profile::ProfileConfig::decl(), + executors::profile::VariantAgentConfig::decl(), + executors::profile::ProfileConfigs::decl(), + executors::executors::claude::ClaudeCode::decl(), + 
executors::executors::gemini::Gemini::decl(), + executors::executors::amp::Amp::decl(), + executors::executors::codex::Codex::decl(), + executors::executors::cursor::Cursor::decl(), + executors::executors::opencode::Opencode::decl(), executors::actions::coding_agent_initial::CodingAgentInitialRequest::decl(), executors::actions::coding_agent_follow_up::CodingAgentFollowUpRequest::decl(), server::routes::task_attempts::CreateTaskAttemptBody::decl(), server::routes::task_attempts::RebaseTaskAttemptRequest::decl(), db::models::task_attempt::TaskAttempt::decl(), db::models::execution_process::ExecutionProcess::decl(), - db::models::execution_process::ExecutionProcessSummary::decl(), db::models::execution_process::ExecutionProcessStatus::decl(), db::models::execution_process::ExecutionProcessRunReason::decl(), services::services::events::EventPatch::decl(), @@ -68,6 +83,7 @@ fn generate_types_content() -> String { executors::logs::EditDiff::decl(), executors::logs::ActionType::decl(), executors::logs::utils::patch::PatchType::decl(), + serde_json::Value::decl(), ]; let body = decls diff --git a/crates/server/src/mcp/agent_config.rs b/crates/server/src/mcp/agent_config.rs deleted file mode 100644 index a92a55ad..00000000 --- a/crates/server/src/mcp/agent_config.rs +++ /dev/null @@ -1,56 +0,0 @@ -//! Utilities for reading and writing external agent config files (not the server's own config). -//! -//! These helpers abstract over JSON vs TOML formats used by different agents. - -use executors::executors::BaseCodingAgent; -use serde_json::Value; -use tokio::fs; - -/// Determine if the agent's config file is TOML-based. -fn is_toml_config(agent: &BaseCodingAgent) -> bool { - matches!(agent, BaseCodingAgent::Codex) -} - -/// Read an agent's external config file (JSON or TOML) and normalize it to serde_json::Value. 
-pub async fn read_agent_config( - config_path: &std::path::Path, - agent: &BaseCodingAgent, -) -> Result> { - let file_content = fs::read_to_string(config_path).await.unwrap_or_else(|_| { - if is_toml_config(agent) { - "".to_string() - } else { - "{}".to_string() - } - }); - - if is_toml_config(agent) { - // Parse TOML then convert to JSON Value - if file_content.trim().is_empty() { - return Ok(serde_json::json!({})); - } - let toml_val: toml::Value = toml::from_str(&file_content)?; - let json_string = serde_json::to_string(&toml_val)?; - Ok(serde_json::from_str(&json_string)?) - } else { - Ok(serde_json::from_str(&file_content)?) - } -} - -/// Write an agent's external config (as serde_json::Value) back to disk in the agent's format (JSON or TOML). -pub async fn write_agent_config( - config_path: &std::path::Path, - agent: &BaseCodingAgent, - config: &Value, -) -> Result<(), Box> { - if is_toml_config(agent) { - // Convert JSON Value back to TOML - let toml_value: toml::Value = serde_json::from_str(&serde_json::to_string(config)?)?; - let toml_content = toml::to_string_pretty(&toml_value)?; - fs::write(config_path, toml_content).await?; - } else { - let json_content = serde_json::to_string_pretty(config)?; - fs::write(config_path, json_content).await?; - } - Ok(()) -} diff --git a/crates/server/src/mcp/mod.rs b/crates/server/src/mcp/mod.rs index 14379252..8420256b 100644 --- a/crates/server/src/mcp/mod.rs +++ b/crates/server/src/mcp/mod.rs @@ -1,2 +1 @@ -pub mod agent_config; pub mod task_server; diff --git a/crates/server/src/routes/config.rs b/crates/server/src/routes/config.rs index e6e25eaf..37a0a7ee 100644 --- a/crates/server/src/routes/config.rs +++ b/crates/server/src/routes/config.rs @@ -9,19 +9,18 @@ use axum::{ Json, Router, }; use deployment::{Deployment, DeploymentError}; -use executors::{command::AgentProfiles, executors::BaseCodingAgent}; +use executors::{ + mcp_config::{read_agent_config, write_agent_config, McpConfig}, + profile::ProfileConfigs, 
+}; use serde::{Deserialize, Serialize}; use serde_json::Value; -use services::services::config::{save_config_to_file, Config, SoundFile}; +use services::services::config::{save_config_to_file, Config, ConfigError, SoundFile}; use tokio::fs; use ts_rs::TS; -use utils::{assets::config_path, path::expand_tilde, response::ApiResponse}; +use utils::{assets::config_path, response::ApiResponse}; -use crate::{ - error::ApiError, - mcp::agent_config::{read_agent_config, write_agent_config}, - DeploymentImpl, -}; +use crate::{error::ApiError, DeploymentImpl}; pub fn router() -> Router { Router::new() @@ -29,6 +28,7 @@ pub fn router() -> Router { .route("/config", put(update_config)) .route("/sounds/{sound}", get(get_sound)) .route("/mcp-config", get(get_mcp_servers).post(update_mcp_servers)) + .route("/profiles", get(get_profiles).put(update_profiles)) } #[derive(Debug, Serialize, Deserialize, TS)] @@ -61,7 +61,7 @@ impl Environment { pub struct UserSystemInfo { pub config: Config, #[serde(flatten)] - pub profiles: AgentProfiles, + pub profiles: ProfileConfigs, pub environment: Environment, } @@ -74,7 +74,7 @@ async fn get_user_system_info( let user_system_info = UserSystemInfo { config: config.clone(), - profiles: AgentProfiles::get_cached().clone(), + profiles: ProfileConfigs::get_cached(), environment: Environment::new(), }; @@ -112,77 +112,77 @@ async fn get_sound(Path(sound): Path) -> Result { Ok(response) } -#[derive(Debug, Deserialize)] -struct McpServerQuery { - base_coding_agent: Option, - mcp_config_path: Option, +#[derive(TS, Debug, Deserialize)] +pub struct McpServerQuery { + profile: String, +} + +#[derive(TS, Debug, Serialize, Deserialize)] +pub struct GetMcpServerResponse { + // servers: HashMap, + mcp_config: McpConfig, + config_path: String, +} + +#[derive(TS, Debug, Serialize, Deserialize)] +pub struct UpdateMcpServersBody { + servers: HashMap, } async fn get_mcp_servers( - State(deployment): State, + State(_deployment): State, Query(query): Query, -) -> 
Result>, ApiError> { - let agent = match query.base_coding_agent { - Some(executor) => executor, - None => { - let config = deployment.config().read().await; - let profile = executors::command::AgentProfiles::get_cached() - .get_profile(&config.profile) - .expect("Corrupted config"); - profile.agent - } - }; +) -> Result>, ApiError> { + let profiles = ProfileConfigs::get_cached(); + let profile = profiles.get_profile(&query.profile).ok_or_else(|| { + ApiError::Config(ConfigError::ValidationError(format!( + "Profile not found: {}", + query.profile + ))) + })?; - if !agent.supports_mcp() { + if !profile.default.agent.supports_mcp() { return Ok(ResponseJson(ApiResponse::error( "This executor does not support MCP servers", ))); } // Resolve supplied config path or agent default - let config_path = if let Some(path_str) = &query.mcp_config_path { - expand_tilde(path_str) - } else { - match agent.config_path() { - Some(path) => path, - None => { - return Ok(ResponseJson(ApiResponse::error( - "Could not determine config file path", - ))) - } + let config_path = match profile.get_mcp_config_path() { + Some(path) => path, + None => { + return Ok(ResponseJson(ApiResponse::error( + "Could not determine config file path", + ))); } }; - match read_mcp_servers_from_config(&config_path, &agent).await { - Ok(servers) => { - let response_data = serde_json::json!({ - "servers": servers, - "config_path": config_path.to_string_lossy().to_string() - }); - Ok(ResponseJson(ApiResponse::success(response_data))) - } - Err(e) => Ok(ResponseJson(ApiResponse::error(&format!( - "Failed to read MCP servers: {}", - e - )))), - } + let mut mcpc = profile.default.agent.get_mcp_config(); + let raw_config = read_agent_config(&config_path, &mcpc).await?; + let servers = get_mcp_servers_from_config_path(&raw_config, &mcpc.servers_path); + mcpc.set_servers(servers); + Ok(ResponseJson(ApiResponse::success(GetMcpServerResponse { + mcp_config: mcpc, + config_path: 
config_path.to_string_lossy().to_string(), + }))) } async fn update_mcp_servers( - State(deployment): State, + State(_deployment): State, Query(query): Query, - Json(new_servers): Json>, + Json(payload): Json, ) -> Result>, ApiError> { - let agent = match query.base_coding_agent { - Some(executor) => executor, - None => { - let config = deployment.config().read().await; - let profile = executors::command::AgentProfiles::get_cached() - .get_profile(&config.profile) - .expect("Corrupted config"); - profile.agent - } - }; + let profiles = ProfileConfigs::get_cached(); + let agent = &profiles + .get_profile(&query.profile) + .ok_or_else(|| { + ApiError::Config(ConfigError::ValidationError(format!( + "Profile not found: {}", + query.profile + ))) + })? + .default + .agent; if !agent.supports_mcp() { return Ok(ResponseJson(ApiResponse::error( @@ -191,20 +191,17 @@ async fn update_mcp_servers( } // Resolve supplied config path or agent default - let config_path = if let Some(path_str) = &query.mcp_config_path { - expand_tilde(path_str) - } else { - match agent.config_path() { - Some(path) => path, - None => { - return Ok(ResponseJson(ApiResponse::error( - "Could not determine config file path", - ))) - } + let config_path = match agent.default_mcp_config_path() { + Some(path) => path, + None => { + return Ok(ResponseJson(ApiResponse::error( + "Could not determine config file path", + ))) } }; - match update_mcp_servers_in_config(&config_path, &agent, new_servers).await { + let mcpc = agent.get_mcp_config(); + match update_mcp_servers_in_config(&config_path, &mcpc, payload.servers).await { Ok(message) => Ok(ResponseJson(ApiResponse::success(message))), Err(e) => Ok(ResponseJson(ApiResponse::error(&format!( "Failed to update MCP servers: {}", @@ -215,27 +212,24 @@ async fn update_mcp_servers( async fn update_mcp_servers_in_config( config_path: &std::path::Path, - agent: &BaseCodingAgent, + mcpc: &McpConfig, new_servers: HashMap, ) -> Result> { // Ensure parent directory 
exists if let Some(parent) = config_path.parent() { fs::create_dir_all(parent).await?; } - // Read existing config (JSON or TOML depending on agent) - let mut config = read_agent_config(config_path, agent).await?; - - let mcp_path = agent.mcp_attribute_path().unwrap(); + let mut config = read_agent_config(config_path, mcpc).await?; // Get the current server count for comparison - let old_servers = get_mcp_servers_from_config_path(agent, &config, &mcp_path).len(); + let old_servers = get_mcp_servers_from_config_path(&config, &mcpc.servers_path).len(); // Set the MCP servers using the correct attribute path - set_mcp_servers_in_config_path(agent, &mut config, &mcp_path, &new_servers)?; + set_mcp_servers_in_config_path(&mut config, &mcpc.servers_path, &new_servers)?; // Write the updated config back to file (JSON or TOML depending on agent) - write_agent_config(config_path, agent, &config).await?; + write_agent_config(config_path, mcpc, &config).await?; let new_count = new_servers.len(); let message = match (old_servers, new_count) { @@ -251,42 +245,15 @@ async fn update_mcp_servers_in_config( Ok(message) } -async fn read_mcp_servers_from_config( - config_path: &std::path::Path, - agent: &BaseCodingAgent, -) -> Result, Box> { - // Read config in appropriate format (JSON or TOML) and normalize to serde_json::Value - let raw_config = read_agent_config(config_path, agent).await?; - let mcp_path = agent.mcp_attribute_path().unwrap(); - let servers = get_mcp_servers_from_config_path(agent, &raw_config, &mcp_path); - Ok(servers) -} - /// Helper function to get MCP servers from config using a path -fn get_mcp_servers_from_config_path( - agent: &BaseCodingAgent, - raw_config: &Value, - path: &[&str], -) -> HashMap { - // Special handling for AMP - use flat key structure - let current = if matches!(agent, BaseCodingAgent::Amp) { - let flat_key = format!("{}.{}", path[0], path[1]); - let current = match raw_config.get(&flat_key) { +fn 
get_mcp_servers_from_config_path(raw_config: &Value, path: &[String]) -> HashMap { + let mut current = raw_config; + for part in path { + current = match current.get(part) { Some(val) => val, None => return HashMap::new(), }; - current - } else { - let mut current = raw_config; - for &part in path { - current = match current.get(part) { - Some(val) => val, - None => return HashMap::new(), - }; - } - current - }; - + } // Extract the servers object match current.as_object() { Some(servers) => servers @@ -299,9 +266,8 @@ fn get_mcp_servers_from_config_path( /// Helper function to set MCP servers in config using a path fn set_mcp_servers_in_config_path( - agent: &BaseCodingAgent, raw_config: &mut Value, - path: &[&str], + path: &[String], servers: &HashMap, ) -> Result<(), Box> { // Ensure config is an object @@ -309,20 +275,9 @@ fn set_mcp_servers_in_config_path( *raw_config = serde_json::json!({}); } - // Special handling for AMP - use flat key structure - if matches!(agent, BaseCodingAgent::Amp) { - let flat_key = format!("{}.{}", path[0], path[1]); - raw_config - .as_object_mut() - .unwrap() - .insert(flat_key, serde_json::to_value(servers)?); - return Ok(()); - } - let mut current = raw_config; - // Navigate/create the nested structure (all parts except the last) - for &part in &path[..path.len() - 1] { + for part in &path[..path.len() - 1] { if current.get(part).is_none() { current .as_object_mut() @@ -344,3 +299,83 @@ fn set_mcp_servers_in_config_path( Ok(()) } + +#[derive(Debug, Serialize, Deserialize)] +pub struct ProfilesContent { + pub content: String, + pub path: String, +} + +async fn get_profiles( + State(_deployment): State, +) -> ResponseJson> { + let profiles_path = utils::assets::profiles_path(); + + let mut profiles = ProfileConfigs::from_defaults(); + if let Ok(user_content) = std::fs::read_to_string(&profiles_path) { + match serde_json::from_str::(&user_content) { + Ok(user_profiles) => { + // Override defaults with user profiles that have the 
same label + for user_profile in user_profiles.profiles { + if let Some(default_profile) = profiles + .profiles + .iter_mut() + .find(|p| p.default.label == user_profile.default.label) + { + *default_profile = user_profile; + } else { + profiles.profiles.push(user_profile); + } + } + } + Err(e) => { + tracing::error!("Failed to parse profiles.json: {}", e); + } + } + } + + let content = serde_json::to_string_pretty(&profiles).unwrap_or_else(|e| { + tracing::error!("Failed to serialize profiles to JSON: {}", e); + serde_json::to_string_pretty(&ProfileConfigs::from_defaults()) + .unwrap_or_else(|_| "{}".to_string()) + }); + + ResponseJson(ApiResponse::success(ProfilesContent { + content, + path: profiles_path.display().to_string(), + })) +} + +async fn update_profiles( + State(_deployment): State, + body: String, +) -> ResponseJson> { + let profiles: ProfileConfigs = match serde_json::from_str(&body) { + Ok(p) => p, + Err(e) => { + return ResponseJson(ApiResponse::error(&format!( + "Invalid profiles format: {}", + e + ))) + } + }; + + let profiles_path = utils::assets::profiles_path(); + + // Simply save all profiles as provided by the user + let formatted = serde_json::to_string_pretty(&profiles).unwrap(); + match fs::write(&profiles_path, formatted).await { + Ok(_) => { + tracing::info!("All profiles saved to {:?}", profiles_path); + // Reload the cached profiles + ProfileConfigs::reload(); + ResponseJson(ApiResponse::success( + "Profiles updated successfully".to_string(), + )) + } + Err(e) => ResponseJson(ApiResponse::error(&format!( + "Failed to save profiles: {}", + e + ))), + } +} diff --git a/crates/server/src/routes/task_attempts.rs b/crates/server/src/routes/task_attempts.rs index c6ddfd8a..673deb63 100644 --- a/crates/server/src/routes/task_attempts.rs +++ b/crates/server/src/routes/task_attempts.rs @@ -16,10 +16,13 @@ use db::models::{ task_attempt::{CreateTaskAttempt, TaskAttempt, TaskAttemptError}, }; use deployment::Deployment; -use 
executors::actions::{ - coding_agent_follow_up::CodingAgentFollowUpRequest, - script::{ScriptContext, ScriptRequest, ScriptRequestLanguage}, - ExecutorAction, ExecutorActionKind, ExecutorActionType, +use executors::{ + actions::{ + coding_agent_follow_up::CodingAgentFollowUpRequest, + script::{ScriptContext, ScriptRequest, ScriptRequestLanguage}, + ExecutorAction, ExecutorActionType, + }, + profile::{ProfileConfigs, ProfileVariantLabel}, }; use futures_util::TryStreamExt; use serde::{Deserialize, Serialize}; @@ -241,7 +244,7 @@ pub async fn get_task_attempt( #[derive(Debug, Deserialize, ts_rs::TS)] pub struct CreateTaskAttemptBody { pub task_id: Uuid, - pub profile: Option, + pub profile_variant_label: Option, pub base_branch: String, } @@ -250,23 +253,24 @@ pub async fn create_task_attempt( State(deployment): State, Json(payload): Json, ) -> Result>, ApiError> { - let profile_label = payload - .profile - .unwrap_or(deployment.config().read().await.profile.to_string()); + let profile_variant_label = payload + .profile_variant_label + .unwrap_or(deployment.config().read().await.profile.clone()); - let profile = executors::command::AgentProfiles::get_cached() - .get_profile(&profile_label) + let profiles = ProfileConfigs::get_cached(); + let profile = profiles + .get_profile(&profile_variant_label.profile) .ok_or_else(|| { ApiError::TaskAttempt(TaskAttemptError::ValidationError(format!( "Profile not found: {}", - profile_label + profile_variant_label.profile ))) })?; let task_attempt = TaskAttempt::create( &deployment.db().pool, &CreateTaskAttempt { - base_coding_agent: profile.agent.to_string(), + profile: profile.default.label.clone(), base_branch: payload.base_branch, }, payload.task_id, @@ -275,7 +279,7 @@ pub async fn create_task_attempt( let execution_process = deployment .container() - .start_attempt(&task_attempt, profile_label.clone()) + .start_attempt(&task_attempt, profile_variant_label.clone()) .await?; deployment @@ -283,8 +287,8 @@ pub async fn 
create_task_attempt( "task_attempt_started", serde_json::json!({ "task_id": task_attempt.task_id.to_string(), - "profile": &profile_label, - "base_coding_agent": profile.agent.to_string(), + "variant": &profile_variant_label.variant, + "profile": profile.default.label, "attempt_id": task_attempt.id.to_string(), }), ) @@ -298,6 +302,7 @@ pub async fn create_task_attempt( #[derive(Debug, Deserialize, TS)] pub struct CreateFollowUpAttempt { pub prompt: String, + pub variant: Option, } pub async fn follow_up( @@ -308,10 +313,10 @@ pub async fn follow_up( tracing::info!("{:?}", task_attempt); // First, get the most recent execution process with executor action type = StandardCoding - let initial_execution_process = ExecutionProcess::find_latest_by_task_attempt_and_action_type( + let latest_execution_process = ExecutionProcess::find_latest_by_task_attempt_and_run_reason( &deployment.db().pool, task_attempt.id, - &ExecutorActionKind::CodingAgentInitialRequest, + &ExecutionProcessRunReason::CodingAgent, ) .await? .ok_or(ApiError::TaskAttempt(TaskAttemptError::ValidationError( @@ -321,7 +326,7 @@ pub async fn follow_up( // Get session_id let session_id = ExecutorSession::find_by_execution_process_id( &deployment.db().pool, - initial_execution_process.id, + latest_execution_process.id, ) .await? .ok_or(ApiError::TaskAttempt(TaskAttemptError::ValidationError( @@ -331,18 +336,27 @@ pub async fn follow_up( .ok_or(ApiError::TaskAttempt(TaskAttemptError::ValidationError( "This executor session doesn't have a session_id".to_string(), )))?; - - let profile = match &initial_execution_process + let initial_profile_variant_label = match &latest_execution_process .executor_action() .map_err(|e| ApiError::TaskAttempt(TaskAttemptError::ValidationError(e.to_string())))? 
.typ { - ExecutorActionType::CodingAgentInitialRequest(request) => Ok(request.profile.clone()), + ExecutorActionType::CodingAgentInitialRequest(request) => { + Ok(request.profile_variant_label.clone()) + } + ExecutorActionType::CodingAgentFollowUpRequest(request) => { + Ok(request.profile_variant_label.clone()) + } _ => Err(ApiError::TaskAttempt(TaskAttemptError::ValidationError( "Couldn't find profile from initial request".to_string(), ))), }?; + let profile_variant_label = ProfileVariantLabel { + profile: initial_profile_variant_label.profile, + variant: payload.variant, + }; + // Get parent task let task = task_attempt .parent_task(&deployment.db().pool) @@ -370,7 +384,7 @@ pub async fn follow_up( ExecutorActionType::CodingAgentFollowUpRequest(CodingAgentFollowUpRequest { prompt: payload.prompt, session_id, - profile, + profile_variant_label, }), cleanup_action, ); diff --git a/crates/server/src/routes/tasks.rs b/crates/server/src/routes/tasks.rs index 39127f8b..28154650 100644 --- a/crates/server/src/routes/tasks.rs +++ b/crates/server/src/routes/tasks.rs @@ -90,25 +90,25 @@ pub async fn create_task_and_start( .await; // use the default executor profile and the current branch for the task attempt - let default_profile_label = deployment.config().read().await.profile.clone().to_string(); + let default_profile_variant = deployment.config().read().await.profile.clone(); let project = Project::find_by_id(&deployment.db().pool, payload.project_id) .await? 
.ok_or(ApiError::Database(SqlxError::RowNotFound))?; let branch = GitService::new().get_current_branch(&project.git_repo_path)?; - let base_coding_agent = executors::command::AgentProfiles::get_cached() - .get_profile(&default_profile_label) - .map(|profile| profile.agent.to_string()) + let profile_label = executors::profile::ProfileConfigs::get_cached() + .get_profile(&default_profile_variant.profile) + .map(|profile| profile.default.label.clone()) .ok_or_else(|| { ApiError::TaskAttempt(TaskAttemptError::ValidationError(format!( - "Profile not found: {}", - default_profile_label + "Profile not found: {:?}", + default_profile_variant ))) })?; let task_attempt = TaskAttempt::create( &deployment.db().pool, &CreateTaskAttempt { - base_coding_agent: base_coding_agent.clone(), + profile: profile_label.clone(), base_branch: branch, }, task.id, @@ -116,15 +116,15 @@ pub async fn create_task_and_start( .await?; let execution_process = deployment .container() - .start_attempt(&task_attempt, default_profile_label.clone()) + .start_attempt(&task_attempt, default_profile_variant.clone()) .await?; deployment .track_if_analytics_allowed( "task_attempt_started", serde_json::json!({ "task_id": task.id.to_string(), - "base_coding_agent": &base_coding_agent, - "profile": &default_profile_label, + "profile": &profile_label, + "variant": &default_profile_variant, "attempt_id": task_attempt.id.to_string(), }), ) @@ -147,7 +147,7 @@ pub async fn create_task_and_start( has_in_progress_attempt: true, has_merged_attempt: false, last_attempt_failed: false, - base_coding_agent: task_attempt.base_coding_agent, + profile: task_attempt.profile, }))) } diff --git a/crates/services/src/services/config/mod.rs b/crates/services/src/services/config/mod.rs index 9df8da41..d2d8856c 100644 --- a/crates/services/src/services/config/mod.rs +++ b/crates/services/src/services/config/mod.rs @@ -10,15 +10,17 @@ pub enum ConfigError { Io(#[from] std::io::Error), #[error(transparent)] Json(#[from] 
serde_json::Error), + #[error("Validation error: {0}")] + ValidationError(String), } -pub type Config = versions::v3::Config; -pub type NotificationConfig = versions::v3::NotificationConfig; -pub type EditorConfig = versions::v3::EditorConfig; -pub type ThemeMode = versions::v3::ThemeMode; -pub type SoundFile = versions::v3::SoundFile; -pub type EditorType = versions::v3::EditorType; -pub type GitHubConfig = versions::v3::GitHubConfig; +pub type Config = versions::v4::Config; +pub type NotificationConfig = versions::v4::NotificationConfig; +pub type EditorConfig = versions::v4::EditorConfig; +pub type ThemeMode = versions::v4::ThemeMode; +pub type SoundFile = versions::v4::SoundFile; +pub type EditorType = versions::v4::EditorType; +pub type GitHubConfig = versions::v4::GitHubConfig; /// Will always return config, trying old schemas or eventually returning default pub async fn load_config_from_file(config_path: &PathBuf) -> Config { diff --git a/crates/services/src/services/config/versions/mod.rs b/crates/services/src/services/config/versions/mod.rs index 3bb4664b..357b4e00 100644 --- a/crates/services/src/services/config/versions/mod.rs +++ b/crates/services/src/services/config/versions/mod.rs @@ -1,3 +1,4 @@ pub(super) mod v1; pub(super) mod v2; pub(super) mod v3; +pub(super) mod v4; diff --git a/crates/services/src/services/config/versions/v4.rs b/crates/services/src/services/config/versions/v4.rs new file mode 100644 index 00000000..4c5fc833 --- /dev/null +++ b/crates/services/src/services/config/versions/v4.rs @@ -0,0 +1,110 @@ +use anyhow::Error; +use executors::profile::ProfileVariantLabel; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +pub use v3::{EditorConfig, EditorType, GitHubConfig, NotificationConfig, SoundFile, ThemeMode}; + +use crate::services::config::versions::v3; + +#[derive(Clone, Debug, Serialize, Deserialize, TS)] +pub struct Config { + pub config_version: String, + pub theme: ThemeMode, + pub profile: ProfileVariantLabel, + pub 
disclaimer_acknowledged: bool, + pub onboarding_acknowledged: bool, + pub github_login_acknowledged: bool, + pub telemetry_acknowledged: bool, + pub notifications: NotificationConfig, + pub editor: EditorConfig, + pub github: GitHubConfig, + pub analytics_enabled: Option, + pub workspace_dir: Option, +} + +impl Config { + pub fn from_previous_version(raw_config: &str) -> Result { + let old_config = match serde_json::from_str::(raw_config) { + Ok(cfg) => cfg, + Err(e) => { + tracing::error!("❌ Failed to parse config: {}", e); + tracing::error!(" at line {}, column {}", e.line(), e.column()); + return Err(e.into()); + } + }; + let mut onboarding_acknowledged = old_config.onboarding_acknowledged; + let profile = match old_config.profile.as_str() { + "claude-code" => ProfileVariantLabel::default("claude-code".to_string()), + "claude-code-plan" => { + ProfileVariantLabel::with_variant("claude-code".to_string(), "plan".to_string()) + } + "claude-code-router" => { + ProfileVariantLabel::with_variant("claude-code".to_string(), "router".to_string()) + } + "amp" => ProfileVariantLabel::default("amp".to_string()), + "gemini" => ProfileVariantLabel::default("gemini".to_string()), + "codex" => ProfileVariantLabel::default("codex".to_string()), + "opencode" => ProfileVariantLabel::default("opencode".to_string()), + "qwen-code" => ProfileVariantLabel::default("qwen-code".to_string()), + _ => { + onboarding_acknowledged = false; // Reset the user's onboarding if executor is not supported + ProfileVariantLabel::default("claude-code".to_string()) + } + }; + + Ok(Self { + config_version: "v4".to_string(), + theme: old_config.theme, + profile, + disclaimer_acknowledged: old_config.disclaimer_acknowledged, + onboarding_acknowledged, + github_login_acknowledged: old_config.github_login_acknowledged, + telemetry_acknowledged: old_config.telemetry_acknowledged, + notifications: old_config.notifications, + editor: old_config.editor, + github: old_config.github, + analytics_enabled: 
old_config.analytics_enabled, + workspace_dir: old_config.workspace_dir, + }) + } +} + +impl From<String> for Config { + fn from(raw_config: String) -> Self { + if let Ok(config) = serde_json::from_str::<Config>(&raw_config) + && config.config_version == "v4" + { + return config; + } + + match Self::from_previous_version(&raw_config) { + Ok(config) => { + tracing::info!("Config upgraded to v4"); + config + } + Err(e) => { + tracing::warn!("Config migration failed: {}, using default", e); + Self::default() + } + } + } +} + +impl Default for Config { + fn default() -> Self { + Self { + config_version: "v4".to_string(), + theme: ThemeMode::System, + profile: ProfileVariantLabel::default("claude-code".to_string()), + disclaimer_acknowledged: false, + onboarding_acknowledged: false, + github_login_acknowledged: false, + telemetry_acknowledged: false, + notifications: NotificationConfig::default(), + editor: EditorConfig::default(), + github: GitHubConfig::default(), + analytics_enabled: None, + workspace_dir: None, + } + } +} diff --git a/crates/services/src/services/container.rs b/crates/services/src/services/container.rs index d9610a16..e6dad90f 100644 --- a/crates/services/src/services/container.rs +++ b/crates/services/src/services/container.rs @@ -31,6 +31,7 @@ use executors::{ }, executors::{CodingAgent, ExecutorError, StandardCodingAgentExecutor}, logs::utils::patch::ConversationPatch, + profile::ProfileVariantLabel, }; use futures::{StreamExt, TryStreamExt, future}; use sqlx::Error as SqlxError; @@ -309,22 +310,26 @@ pub trait ContainerService { // Spawn normalizer on populated store match executor_action.typ() { ExecutorActionType::CodingAgentInitialRequest(request) => { - if let Ok(executor) = CodingAgent::from_profile_str(&request.profile) { + if let Ok(executor) = + CodingAgent::from_profile_variant_label(&request.profile_variant_label) + { executor.normalize_logs(temp_store.clone(), &current_dir); } else { tracing::error!( - "Failed to resolve profile '{}' for normalization", - 
request.profile + "Failed to resolve profile '{:?}' for normalization", + request.profile_variant_label ); } } ExecutorActionType::CodingAgentFollowUpRequest(request) => { - if let Ok(executor) = CodingAgent::from_profile_str(&request.profile) { + if let Ok(executor) = + CodingAgent::from_profile_variant_label(&request.profile_variant_label) + { executor.normalize_logs(temp_store.clone(), ¤t_dir); } else { tracing::error!( - "Failed to resolve profile '{}' for normalization", - request.profile + "Failed to resolve profile '{:?}' for normalization", + request.profile_variant_label ); } } @@ -426,7 +431,7 @@ pub trait ContainerService { async fn start_attempt( &self, task_attempt: &TaskAttempt, - profile_label: String, + profile_variant_label: ProfileVariantLabel, ) -> Result { // Create container self.create(task_attempt).await?; @@ -471,7 +476,7 @@ pub trait ContainerService { Some(Box::new(ExecutorAction::new( ExecutorActionType::CodingAgentInitialRequest(CodingAgentInitialRequest { prompt: task.to_prompt(), - profile: profile_label, + profile_variant_label, }), cleanup_action, ))), @@ -487,7 +492,7 @@ pub trait ContainerService { let executor_action = ExecutorAction::new( ExecutorActionType::CodingAgentInitialRequest(CodingAgentInitialRequest { prompt: task.to_prompt(), - profile: profile_label, + profile_variant_label, }), cleanup_action, ); @@ -529,13 +534,19 @@ pub trait ContainerService { ExecutionProcess::create(&self.db().pool, &create_execution_process, Uuid::new_v4()) .await?; - if let ExecutorActionType::CodingAgentInitialRequest(coding_agent_request) = - executor_action.typ() - { + if let Some(prompt) = match executor_action.typ() { + ExecutorActionType::CodingAgentInitialRequest(coding_agent_request) => { + Some(coding_agent_request.prompt.clone()) + } + ExecutorActionType::CodingAgentFollowUpRequest(follow_up_request) => { + Some(follow_up_request.prompt.clone()) + } + _ => None, + } { let create_executor_data = CreateExecutorSession { 
task_attempt_id: task_attempt.id, execution_process_id: execution_process.id, - prompt: Some(coding_agent_request.prompt.clone()), + prompt: Some(prompt), }; let executor_session_record_id = Uuid::new_v4(); @@ -556,30 +567,34 @@ pub trait ContainerService { match executor_action.typ() { ExecutorActionType::CodingAgentInitialRequest(request) => { if let Some(msg_store) = self.get_msg_store_by_id(&execution_process.id).await { - if let Ok(executor) = CodingAgent::from_profile_str(&request.profile) { + if let Ok(executor) = + CodingAgent::from_profile_variant_label(&request.profile_variant_label) + { executor.normalize_logs( msg_store, &self.task_attempt_to_current_dir(task_attempt), ); } else { tracing::error!( - "Failed to resolve profile '{}' for normalization", - request.profile + "Failed to resolve profile '{:?}' for normalization", + request.profile_variant_label ); } } } ExecutorActionType::CodingAgentFollowUpRequest(request) => { if let Some(msg_store) = self.get_msg_store_by_id(&execution_process.id).await { - if let Ok(executor) = CodingAgent::from_profile_str(&request.profile) { + if let Ok(executor) = + CodingAgent::from_profile_variant_label(&request.profile_variant_label) + { executor.normalize_logs( msg_store, &self.task_attempt_to_current_dir(task_attempt), ); } else { tracing::error!( - "Failed to resolve profile '{}' for normalization", - request.profile + "Failed to resolve profile '{:?}' for normalization", + request.profile_variant_label ); } } diff --git a/crates/services/src/services/filesystem_watcher.rs b/crates/services/src/services/filesystem_watcher.rs index ce84f106..2fa8985a 100644 --- a/crates/services/src/services/filesystem_watcher.rs +++ b/crates/services/src/services/filesystem_watcher.rs @@ -71,7 +71,7 @@ fn build_gitignore_set(root: &Path) -> Result Ok(builder.build()?) 
} -fn path_allowed(path: &PathBuf, gi: &Gitignore, canonical_root: &Path) -> bool { +fn path_allowed(path: &Path, gi: &Gitignore, canonical_root: &Path) -> bool { let canonical_path = canonicalize_lossy(path); // Convert absolute path to relative path from the gitignore root diff --git a/crates/services/src/services/git.rs b/crates/services/src/services/git.rs index 29557eca..fa1e7886 100644 --- a/crates/services/src/services/git.rs +++ b/crates/services/src/services/git.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::Path; use chrono::{DateTime, Utc}; use git2::{ @@ -927,7 +927,7 @@ impl GitService { } /// Get the default branch name for the repository - pub fn get_default_branch_name(&self, repo_path: &PathBuf) -> Result { + pub fn get_default_branch_name(&self, repo_path: &Path) -> Result { let repo = self.open_repo(repo_path)?; match repo.head() { @@ -945,7 +945,7 @@ impl GitService { /// Extract GitHub owner and repo name from git repo path pub fn get_github_repo_info( &self, - repo_path: &PathBuf, + repo_path: &Path, ) -> Result<(String, String), GitServiceError> { let repo = self.open_repo(repo_path)?; let remote = repo.find_remote("origin").map_err(|_| { diff --git a/crates/services/src/services/notification.rs b/crates/services/src/services/notification.rs index 3206f52f..77de8bbc 100644 --- a/crates/services/src/services/notification.rs +++ b/crates/services/src/services/notification.rs @@ -19,11 +19,11 @@ impl NotificationService { let message = match ctx.execution_process.status { ExecutionProcessStatus::Completed => format!( "✅ '{}' completed successfully\nBranch: {:?}\nExecutor: {}", - ctx.task.title, ctx.task_attempt.branch, ctx.task_attempt.base_coding_agent + ctx.task.title, ctx.task_attempt.branch, ctx.task_attempt.profile ), ExecutionProcessStatus::Failed | ExecutionProcessStatus::Killed => format!( "❌ '{}' execution failed\nBranch: {:?}\nExecutor: {}", - ctx.task.title, ctx.task_attempt.branch, 
ctx.task_attempt.base_coding_agent + ctx.task.title, ctx.task_attempt.branch, ctx.task_attempt.profile ), _ => { tracing::warn!( diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 495097bf..47d242a9 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -40,8 +40,7 @@ "react-window": "^1.8.11", "rfc6902": "^5.1.2", "tailwind-merge": "^2.2.0", - "tailwindcss-animate": "^1.0.7", - "use-debounce": "^10.0.5" + "tailwindcss-animate": "^1.0.7" }, "devDependencies": { "@types/react": "^18.2.43", @@ -7459,18 +7458,6 @@ } } }, - "node_modules/use-debounce": { - "version": "10.0.5", - "resolved": "https://registry.npmjs.org/use-debounce/-/use-debounce-10.0.5.tgz", - "integrity": "sha512-Q76E3lnIV+4YT9AHcrHEHYmAd9LKwUAbPXDm7FlqVGDHiSOhX3RDjT8dm0AxbJup6WgOb1YEcKyCr11kBJR5KQ==", - "license": "MIT", - "engines": { - "node": ">= 16.0.0" - }, - "peerDependencies": { - "react": "*" - } - }, "node_modules/use-isomorphic-layout-effect": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.1.tgz", diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index bc1d65a8..32f5c281 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -12,7 +12,7 @@ import { OnboardingDialog } from '@/components/OnboardingDialog'; import { PrivacyOptInDialog } from '@/components/PrivacyOptInDialog'; import { ConfigProvider, useConfig } from '@/components/config-provider'; import { ThemeProvider } from '@/components/theme-provider'; -import type { EditorType } from 'shared/types'; +import type { EditorType, ProfileVariantLabel } from 'shared/types'; import { ThemeMode } from 'shared/types'; import { configApi } from '@/lib/api'; import * as Sentry from '@sentry/react'; @@ -61,7 +61,7 @@ function AppContent() { }; const handleOnboardingComplete = async (onboardingConfig: { - profile: string; + profile: ProfileVariantLabel; editor: { editor_type: EditorType; custom_command: string | 
null }; }) => { if (!config) return; diff --git a/frontend/src/components/OnboardingDialog.tsx b/frontend/src/components/OnboardingDialog.tsx index 10affc1b..7ce5e3ee 100644 --- a/frontend/src/components/OnboardingDialog.tsx +++ b/frontend/src/components/OnboardingDialog.tsx @@ -15,11 +15,17 @@ import { SelectTrigger, SelectValue, } from '@/components/ui/select'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '@/components/ui/dropdown-menu'; import { Label } from '@/components/ui/label'; import { Input } from '@/components/ui/input'; import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card'; -import { Sparkles, Code } from 'lucide-react'; -import { EditorType } from 'shared/types'; +import { Sparkles, Code, ChevronDown } from 'lucide-react'; +import { EditorType, ProfileVariantLabel } from 'shared/types'; import { useUserSystem } from '@/components/config-provider'; import { toPrettyCase } from '@/utils/string'; @@ -27,13 +33,16 @@ import { toPrettyCase } from '@/utils/string'; interface OnboardingDialogProps { open: boolean; onComplete: (config: { - profile: string; + profile: ProfileVariantLabel; editor: { editor_type: EditorType; custom_command: string | null }; }) => void; } export function OnboardingDialog({ open, onComplete }: OnboardingDialogProps) { - const [profile, setProfile] = useState('claude-code'); + const [profile, setProfile] = useState({ + profile: 'claude-code', + variant: null, + }); const [editorType, setEditorType] = useState(EditorType.VS_CODE); const [customCommand, setCustomCommand] = useState(''); @@ -79,21 +88,95 @@ export function OnboardingDialog({ open, onComplete }: OnboardingDialogProps) {
- +
+ + + {/* Show variant selector if selected profile has variants */} + {(() => { + const selectedProfile = profiles?.find( + (p) => p.label === profile.profile + ); + const hasVariants = + selectedProfile?.variants && + selectedProfile.variants.length > 0; + + if (hasVariants) { + return ( + + + + + + + setProfile({ ...profile, variant: null }) + } + className={!profile.variant ? 'bg-accent' : ''} + > + Default + + {selectedProfile.variants.map((variant) => ( + + setProfile({ + ...profile, + variant: variant.label, + }) + } + className={ + profile.variant === variant.label + ? 'bg-accent' + : '' + } + > + {variant.label} + + ))} + + + ); + } else if (selectedProfile) { + // Show disabled button when profile exists but has no variants + return ( + + ); + } + return null; + })()} +
diff --git a/frontend/src/components/common/ProfileVariantBadge.tsx b/frontend/src/components/common/ProfileVariantBadge.tsx new file mode 100644 index 00000000..13fd22a0 --- /dev/null +++ b/frontend/src/components/common/ProfileVariantBadge.tsx @@ -0,0 +1,30 @@ +import type { ProfileVariantLabel } from 'shared/types'; +import { cn } from '@/lib/utils'; + +interface ProfileVariantBadgeProps { + profileVariant: ProfileVariantLabel | null; + className?: string; +} + +export function ProfileVariantBadge({ + profileVariant, + className, +}: ProfileVariantBadgeProps) { + if (!profileVariant) { + return null; + } + + return ( + + {profileVariant.profile} + {profileVariant.variant && ( + <> + / + {profileVariant.variant} + + )} + + ); +} + +export default ProfileVariantBadge; diff --git a/frontend/src/components/config-provider.tsx b/frontend/src/components/config-provider.tsx index c91d9e24..a4d3efa2 100644 --- a/frontend/src/components/config-provider.tsx +++ b/frontend/src/components/config-provider.tsx @@ -10,7 +10,7 @@ import { import { type Config, type Environment, - type AgentProfile, + type ProfileConfig, type UserSystemInfo, CheckTokenResponse, } from 'shared/types'; @@ -19,7 +19,7 @@ import { configApi, githubAuthApi } from '../lib/api'; interface UserSystemState { config: Config | null; environment: Environment | null; - profiles: AgentProfile[] | null; + profiles: ProfileConfig[] | null; } interface UserSystemContextType { @@ -34,9 +34,12 @@ interface UserSystemContextType { // System data access environment: Environment | null; - profiles: AgentProfile[] | null; + profiles: ProfileConfig[] | null; setEnvironment: (env: Environment | null) => void; - setProfiles: (profiles: AgentProfile[] | null) => void; + setProfiles: (profiles: ProfileConfig[] | null) => void; + + // Reload system data + reloadSystem: () => Promise; // State loading: boolean; @@ -55,7 +58,7 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { // Split state for 
performance - independent re-renders const [config, setConfig] = useState(null); const [environment, setEnvironment] = useState(null); - const [profiles, setProfiles] = useState(null); + const [profiles, setProfiles] = useState(null); const [loading, setLoading] = useState(true); const [githubTokenInvalid, setGithubTokenInvalid] = useState(false); @@ -133,6 +136,20 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { [config] ); + const reloadSystem = useCallback(async () => { + setLoading(true); + try { + const userSystemInfo: UserSystemInfo = await configApi.getConfig(); + setConfig(userSystemInfo.config); + setEnvironment(userSystemInfo.environment); + setProfiles(userSystemInfo.profiles); + } catch (err) { + console.error('Error reloading user system:', err); + } finally { + setLoading(false); + } + }, []); + // Memoize context value to prevent unnecessary re-renders const value = useMemo( () => ({ @@ -145,6 +162,7 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { updateAndSaveConfig, setEnvironment, setProfiles, + reloadSystem, loading, githubTokenInvalid, }), @@ -155,6 +173,7 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { updateConfig, saveConfig, updateAndSaveConfig, + reloadSystem, loading, githubTokenInvalid, ] diff --git a/frontend/src/components/context/TaskDetailsContextProvider.tsx b/frontend/src/components/context/TaskDetailsContextProvider.tsx index be584b85..f89fdbf3 100644 --- a/frontend/src/components/context/TaskDetailsContextProvider.tsx +++ b/frontend/src/components/context/TaskDetailsContextProvider.tsx @@ -8,7 +8,7 @@ import { useMemo, useState, } from 'react'; -import type { ExecutionProcess, ExecutionProcessSummary } from 'shared/types'; +import type { ExecutionProcess } from 'shared/types'; import type { EditorType, TaskAttempt, @@ -24,6 +24,7 @@ import { TaskSelectedAttemptContext, } from './taskDetailsContext.ts'; import type { AttemptData } from 
'@/lib/types.ts'; +import { useUserSystem } from '@/components/config-provider'; const TaskDetailsProvider: FC<{ task: TaskWithAttemptStatus; @@ -38,6 +39,7 @@ const TaskDetailsProvider: FC<{ setShowEditorDialog, projectHasDevScript, }) => { + const { profiles } = useUserSystem(); const [loading, setLoading] = useState(false); const [isStopping, setIsStopping] = useState(false); const [selectedAttempt, setSelectedAttempt] = useState( @@ -83,21 +85,8 @@ const TaskDetailsProvider: FC<{ await executionProcessesApi.getExecutionProcesses(attemptId); if (processesResult !== undefined) { - const runningProcesses = processesResult.filter( - (process) => process.status === 'running' - ); - const runningProcessDetails: Record = {}; - // Fetch details for running processes - for (const process of runningProcesses) { - const result = await executionProcessesApi.getDetails(process.id); - - if (result !== undefined) { - runningProcessDetails[process.id] = result; - } - } - // Also fetch setup script process details if it exists in the processes const setupProcess = processesResult.find( (process) => process.run_reason === 'setupscript' @@ -109,6 +98,7 @@ const TaskDetailsProvider: FC<{ if (result !== undefined) { runningProcessDetails[setupProcess.id] = result; + // Extract ProfileVariant from the executor_action } } @@ -140,7 +130,7 @@ const TaskDetailsProvider: FC<{ } return attemptData.processes.some( - (process: ExecutionProcessSummary) => + (process: ExecutionProcess) => (process.run_reason === 'codingagent' || process.run_reason === 'setupscript' || process.run_reason === 'cleanupscript') && @@ -148,6 +138,32 @@ const TaskDetailsProvider: FC<{ ); }, [selectedAttempt, attemptData.processes, isStopping]); + const defaultFollowUpVariant = useMemo(() => { + // Find most recent coding agent process with variant + const latest_profile = attemptData.processes + .filter((p) => p.run_reason === 'codingagent') + .reverse() + .map((process) => { + if ( + 
process.executor_action?.typ.type === 'CodingAgentInitialRequest' || + process.executor_action?.typ.type === 'CodingAgentFollowUpRequest' + ) { + return process.executor_action?.typ.profile_variant_label; + } + })[0]; + if (latest_profile) { + return latest_profile.variant; + } + if (selectedAttempt?.profile && profiles) { + // No processes yet, check if profile has default variant + const profile = profiles.find((p) => p.label === selectedAttempt.profile); + if (profile?.variants && profile.variants.length > 0) { + return profile.variants[0].label; + } + } + return null; + }, [attemptData.processes, selectedAttempt?.profile, profiles]); + useEffect(() => { if (!isAttemptRunning || !task) return; @@ -201,8 +217,9 @@ const TaskDetailsProvider: FC<{ setAttemptData, fetchAttemptData, isAttemptRunning, + defaultFollowUpVariant, }), - [attemptData, fetchAttemptData, isAttemptRunning] + [attemptData, fetchAttemptData, isAttemptRunning, defaultFollowUpVariant] ); return ( diff --git a/frontend/src/components/context/taskDetailsContext.ts b/frontend/src/components/context/taskDetailsContext.ts index 38f05bc0..fddea516 100644 --- a/frontend/src/components/context/taskDetailsContext.ts +++ b/frontend/src/components/context/taskDetailsContext.ts @@ -30,8 +30,9 @@ export const TaskAttemptLoadingContext = interface TaskAttemptDataContextValue { attemptData: AttemptData; setAttemptData: Dispatch>; - fetchAttemptData: (attemptId: string, taskId: string) => Promise | void; + fetchAttemptData: (attemptId: string) => Promise | void; isAttemptRunning: boolean; + defaultFollowUpVariant: string | null; } export const TaskAttemptDataContext = diff --git a/frontend/src/components/tasks/TaskDetails/ProcessCard.tsx b/frontend/src/components/tasks/TaskDetails/ProcessCard.tsx index 5627421e..8c6ff43f 100644 --- a/frontend/src/components/tasks/TaskDetails/ProcessCard.tsx +++ b/frontend/src/components/tasks/TaskDetails/ProcessCard.tsx @@ -8,16 +8,13 @@ import { ChevronDown, ChevronRight, } 
from 'lucide-react'; -import type { - ExecutionProcessStatus, - ExecutionProcessSummary, -} from 'shared/types'; +import type { ExecutionProcessStatus, ExecutionProcess } from 'shared/types'; import { useLogStream } from '@/hooks/useLogStream'; import { useProcessConversation } from '@/hooks/useProcessConversation'; import DisplayConversationEntry from '@/components/NormalizedConversation/DisplayConversationEntry'; interface ProcessCardProps { - process: ExecutionProcessSummary; + process: ExecutionProcess; } function ProcessCard({ process }: ProcessCardProps) { diff --git a/frontend/src/components/tasks/TaskDetails/ProcessesTab.tsx b/frontend/src/components/tasks/TaskDetails/ProcessesTab.tsx index 3950528f..5cac38fa 100644 --- a/frontend/src/components/tasks/TaskDetails/ProcessesTab.tsx +++ b/frontend/src/components/tasks/TaskDetails/ProcessesTab.tsx @@ -10,10 +10,8 @@ import { } from 'lucide-react'; import { TaskAttemptDataContext } from '@/components/context/taskDetailsContext.ts'; import { executionProcessesApi } from '@/lib/api.ts'; -import type { - ExecutionProcessStatus, - ExecutionProcessSummary, -} from 'shared/types'; +import { ProfileVariantBadge } from '@/components/common/ProfileVariantBadge.tsx'; +import type { ExecutionProcessStatus, ExecutionProcess } from 'shared/types'; function ProcessesTab() { const { attemptData, setAttemptData } = useContext(TaskAttemptDataContext); @@ -78,7 +76,7 @@ function ProcessesTab() { } }; - const handleProcessClick = async (process: ExecutionProcessSummary) => { + const handleProcessClick = async (process: ExecutionProcess) => { setSelectedProcessId(process.id); // If we don't have details for this process, fetch them @@ -127,6 +125,22 @@ function ProcessesTab() {

Process ID: {process.id}

+ { +

+ Profile:{' '} + {process.executor_action.typ.type === + 'CodingAgentInitialRequest' || + process.executor_action.typ.type === + 'CodingAgentFollowUpRequest' ? ( + + ) : null} +

+ }
@@ -189,6 +203,20 @@ function ProcessesTab() { Exit Code:{' '} {selectedProcess.exit_code?.toString() ?? 'N/A'}

+ {selectedProcess.executor_action.typ.type === + 'CodingAgentInitialRequest' || + selectedProcess.executor_action.typ.type === + 'CodingAgentFollowUpRequest' ? ( +

+ Profile:{' '} + +

+ ) : null}
diff --git a/frontend/src/components/tasks/TaskDetailsToolbar.tsx b/frontend/src/components/tasks/TaskDetailsToolbar.tsx index b7f7f1a1..805f46d9 100644 --- a/frontend/src/components/tasks/TaskDetailsToolbar.tsx +++ b/frontend/src/components/tasks/TaskDetailsToolbar.tsx @@ -10,7 +10,7 @@ import { useLocation, useNavigate, useParams } from 'react-router-dom'; import { Play } from 'lucide-react'; import { Button } from '@/components/ui/button'; import { attemptsApi, projectsApi } from '@/lib/api'; -import type { GitBranch } from 'shared/types'; +import type { GitBranch, ProfileVariantLabel } from 'shared/types'; import type { TaskAttempt } from 'shared/types'; import { @@ -90,7 +90,8 @@ function TaskDetailsToolbar() { const [taskAttempts, setTaskAttempts] = useState([]); const [branches, setBranches] = useState([]); const [selectedBranch, setSelectedBranch] = useState(null); - const [selectedProfile, setSelectedProfile] = useState(null); + const [selectedProfile, setSelectedProfile] = + useState(null); const navigate = useNavigate(); const { attemptId: urlAttemptId } = useParams<{ attemptId?: string }>(); diff --git a/frontend/src/components/tasks/TaskFollowUpSection.tsx b/frontend/src/components/tasks/TaskFollowUpSection.tsx index 59a92b77..c3487b53 100644 --- a/frontend/src/components/tasks/TaskFollowUpSection.tsx +++ b/frontend/src/components/tasks/TaskFollowUpSection.tsx @@ -1,8 +1,8 @@ -import { AlertCircle, Send } from 'lucide-react'; +import { AlertCircle, Send, ChevronDown } from 'lucide-react'; import { Button } from '@/components/ui/button'; import { Alert, AlertDescription } from '@/components/ui/alert'; import { FileSearchTextarea } from '@/components/ui/file-search-textarea'; -import { useContext, useMemo, useState } from 'react'; +import { useContext, useEffect, useMemo, useState } from 'react'; import { attemptsApi } from '@/lib/api.ts'; import { TaskAttemptDataContext, @@ -10,17 +10,34 @@ import { TaskSelectedAttemptContext, } from 
'@/components/context/taskDetailsContext.ts'; import { Loader } from '@/components/ui/loader'; +import { useUserSystem } from '@/components/config-provider'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '@/components/ui/dropdown-menu'; export function TaskFollowUpSection() { const { task, projectId } = useContext(TaskDetailsContext); const { selectedAttempt } = useContext(TaskSelectedAttemptContext); - const { attemptData, fetchAttemptData, isAttemptRunning } = useContext( - TaskAttemptDataContext - ); + const { + attemptData, + fetchAttemptData, + isAttemptRunning, + defaultFollowUpVariant, + } = useContext(TaskAttemptDataContext); + const { profiles } = useUserSystem(); const [followUpMessage, setFollowUpMessage] = useState(''); const [isSendingFollowUp, setIsSendingFollowUp] = useState(false); const [followUpError, setFollowUpError] = useState(null); + const [selectedVariant, setSelectedVariant] = useState( + defaultFollowUpVariant + ); + + // Get the profile from the selected attempt + const selectedProfile = selectedAttempt?.profile || null; const canSendFollowUp = useMemo(() => { if ( @@ -38,6 +55,15 @@ export function TaskFollowUpSection() { isAttemptRunning, isSendingFollowUp, ]); + const currentProfile = useMemo(() => { + if (!selectedProfile || !profiles) return null; + return profiles.find((p) => p.label === selectedProfile); + }, [selectedProfile, profiles]); + + // Update selectedVariant when defaultFollowUpVariant changes + useEffect(() => { + setSelectedVariant(defaultFollowUpVariant); + }, [defaultFollowUpVariant]); const onSendFollowUp = async () => { if (!task || !selectedAttempt || !followUpMessage.trim()) return; @@ -47,9 +73,10 @@ export function TaskFollowUpSection() { setFollowUpError(null); await attemptsApi.followUp(selectedAttempt.id, { prompt: followUpMessage.trim(), + variant: selectedVariant, }); setFollowUpMessage(''); - fetchAttemptData(selectedAttempt.id, 
selectedAttempt.task_id); + fetchAttemptData(selectedAttempt.id); } catch (error: unknown) { // @ts-expect-error it is type ApiError setFollowUpError(`Failed to start follow-up execution: ${error.message}`); @@ -68,48 +95,115 @@ export function TaskFollowUpSection() { {followUpError} )} -
- { - setFollowUpMessage(value); - if (followUpError) setFollowUpError(null); - }} - onKeyDown={(e) => { - if ((e.metaKey || e.ctrlKey) && e.key === 'Enter') { - e.preventDefault(); - if ( - canSendFollowUp && - followUpMessage.trim() && - !isSendingFollowUp - ) { - onSendFollowUp(); +
+
+ { + setFollowUpMessage(value); + if (followUpError) setFollowUpError(null); + }} + onKeyDown={(e) => { + if ((e.metaKey || e.ctrlKey) && e.key === 'Enter') { + e.preventDefault(); + if ( + canSendFollowUp && + followUpMessage.trim() && + !isSendingFollowUp + ) { + onSendFollowUp(); + } } + }} + className="flex-1 min-h-[40px] resize-none" + disabled={!canSendFollowUp} + projectId={projectId} + rows={1} + maxRows={6} + /> + + {/* Variant selector */} + {(() => { + const hasVariants = + currentProfile?.variants && + currentProfile.variants.length > 0; + + if (hasVariants) { + return ( + + + + + + setSelectedVariant(null)} + className={!selectedVariant ? 'bg-accent' : ''} + > + Default + + {currentProfile.variants.map((variant) => ( + setSelectedVariant(variant.label)} + className={ + selectedVariant === variant.label + ? 'bg-accent' + : '' + } + > + {variant.label} + + ))} + + + ); + } else if (currentProfile) { + // Show disabled button when profile exists but has no variants + return ( + + ); } - }} - className="flex-1 min-h-[40px] resize-none" - disabled={!canSendFollowUp} - projectId={projectId} - rows={1} - maxRows={6} - /> - + return null; + })()} + + +
diff --git a/frontend/src/components/tasks/Toolbar/CreateAttempt.tsx b/frontend/src/components/tasks/Toolbar/CreateAttempt.tsx index 7bc0c5f2..94fca9ec 100644 --- a/frontend/src/components/tasks/Toolbar/CreateAttempt.tsx +++ b/frontend/src/components/tasks/Toolbar/CreateAttempt.tsx @@ -7,7 +7,11 @@ import { DropdownMenuItem, DropdownMenuTrigger, } from '@/components/ui/dropdown-menu.tsx'; -import type { AgentProfile, GitBranch } from 'shared/types'; +import type { + ProfileConfig, + GitBranch, + ProfileVariantLabel, +} from 'shared/types'; import type { TaskAttempt } from 'shared/types'; import { attemptsApi } from '@/lib/api.ts'; import { @@ -30,13 +34,13 @@ type Props = { branches: GitBranch[]; taskAttempts: TaskAttempt[]; createAttemptBranch: string | null; - selectedProfile: string | null; + selectedProfile: ProfileVariantLabel | null; selectedBranch: string | null; fetchTaskAttempts: () => void; setIsInCreateAttemptMode: Dispatch>; setCreateAttemptBranch: Dispatch>; - setSelectedProfile: Dispatch>; - availableProfiles: AgentProfile[] | null; + setSelectedProfile: Dispatch>; + availableProfiles: ProfileConfig[] | null; }; function CreateAttempt({ @@ -63,7 +67,7 @@ function CreateAttempt({ // Create attempt logic const actuallyCreateAttempt = useCallback( - async (profile: string, baseBranch?: string) => { + async (profile: ProfileVariantLabel, baseBranch?: string) => { const effectiveBaseBranch = baseBranch || selectedBranch; if (!effectiveBaseBranch) { @@ -72,7 +76,7 @@ function CreateAttempt({ await attemptsApi.create({ task_id: task.id, - profile: profile, + profile_variant_label: profile, base_branch: effectiveBaseBranch, }); fetchTaskAttempts(); @@ -82,7 +86,11 @@ function CreateAttempt({ // Handler for Enter key or Start button const onCreateNewAttempt = useCallback( - (profile: string, baseBranch?: string, isKeyTriggered?: boolean) => { + ( + profile: ProfileVariantLabel, + baseBranch?: string, + isKeyTriggered?: boolean + ) => { if (task.status === 
'todo' && isKeyTriggered) { setSelectedProfile(profile); setPendingBaseBranch(baseBranch); @@ -175,45 +183,133 @@ function CreateAttempt({ /> - {/* Step 2: Choose Profile */} + {/* Step 2: Choose Profile and Mode */}
- {availableProfiles && ( - - - - - - {availableProfiles.map((profile) => ( - setSelectedProfile(profile.label)} - className={ - selectedProfile === profile.label ? 'bg-accent' : '' - } +
+ {availableProfiles && ( + + + + + + {availableProfiles.map((profile) => ( + { + setSelectedProfile({ + profile: profile.label, + variant: null, + }); + }} + className={ + selectedProfile?.profile === profile.label + ? 'bg-accent' + : '' + } + > + {profile.label} + + ))} + + + )} + + {/* Show variant dropdown or disabled button */} + {(() => { + const currentProfile = availableProfiles?.find( + (p) => p.label === selectedProfile?.profile + ); + const hasVariants = + currentProfile?.variants && + currentProfile.variants.length > 0; + + if (hasVariants) { + return ( + + + + + + { + if (selectedProfile) { + setSelectedProfile({ + ...selectedProfile, + variant: null, + }); + } + }} + className={ + !selectedProfile?.variant ? 'bg-accent' : '' + } + > + Default + + {currentProfile.variants.map((variant) => ( + { + if (selectedProfile) { + setSelectedProfile({ + ...selectedProfile, + variant: variant.label, + }); + } + }} + className={ + selectedProfile?.variant === variant.label + ? 'bg-accent' + : '' + } + > + {variant.label} + + ))} + + + ); + } else if (currentProfile) { + // Show disabled button when profile exists but has no variants + return ( + + ); + } + return null; + })()} +
{/* Step 3: Start Attempt */} diff --git a/frontend/src/components/tasks/Toolbar/CurrentAttempt.tsx b/frontend/src/components/tasks/Toolbar/CurrentAttempt.tsx index 75494293..eae6c28c 100644 --- a/frontend/src/components/tasks/Toolbar/CurrentAttempt.tsx +++ b/frontend/src/components/tasks/Toolbar/CurrentAttempt.tsx @@ -163,7 +163,7 @@ function CurrentAttempt({ try { await attemptsApi.startDevServer(selectedAttempt.id); - fetchAttemptData(selectedAttempt.id, selectedAttempt.task_id); + fetchAttemptData(selectedAttempt.id); } catch (err) { console.error('Failed to start dev server:', err); } finally { @@ -178,7 +178,7 @@ function CurrentAttempt({ try { await executionProcessesApi.stopExecutionProcess(runningDevServer.id); - fetchAttemptData(selectedAttempt.id, selectedAttempt.task_id); + fetchAttemptData(selectedAttempt.id); } catch (err) { console.error('Failed to stop dev server:', err); } finally { @@ -192,9 +192,9 @@ function CurrentAttempt({ try { setIsStopping(true); await attemptsApi.stop(selectedAttempt.id); - await fetchAttemptData(selectedAttempt.id, selectedAttempt.task_id); + await fetchAttemptData(selectedAttempt.id); setTimeout(() => { - fetchAttemptData(selectedAttempt.id, selectedAttempt.task_id); + fetchAttemptData(selectedAttempt.id); }, 1000); } catch (err) { console.error('Failed to stop executions:', err); @@ -224,7 +224,7 @@ function CurrentAttempt({ const handleAttemptChange = useCallback( (attempt: TaskAttempt) => { handleAttemptSelect(attempt); - fetchAttemptData(attempt.id, attempt.task_id); + fetchAttemptData(attempt.id); }, [fetchAttemptData, handleAttemptSelect] ); @@ -379,11 +379,9 @@ function CurrentAttempt({
- Base Agent -
-
- {selectedAttempt.base_coding_agent} + Profile
+
{selectedAttempt.profile}
@@ -573,7 +571,7 @@ function CurrentAttempt({ {new Date(attempt.created_at).toLocaleTimeString()} - {attempt.base_coding_agent || 'Base Agent'} + {attempt.profile || 'Base Agent'}
diff --git a/frontend/src/constants/processes.ts b/frontend/src/constants/processes.ts index 5b957ea2..e58a96c7 100644 --- a/frontend/src/constants/processes.ts +++ b/frontend/src/constants/processes.ts @@ -1,7 +1,7 @@ import type { ExecutionProcessRunReason, ExecutionProcessStatus, - ExecutionProcessSummary, + ExecutionProcess, } from 'shared/types'; // Process run reasons @@ -49,7 +49,7 @@ export const shouldShowInLogs = ( }; export const getLatestCodingAgent = ( - processes: ExecutionProcessSummary[] + processes: ExecutionProcess[] ): string | null => { const codingAgents = processes.filter((p) => isCodingAgent(p.run_reason)); if (codingAgents.length === 0) return null; diff --git a/frontend/src/hooks/useEventSourceManager.ts b/frontend/src/hooks/useEventSourceManager.ts index 5d1623a8..b543254e 100644 --- a/frontend/src/hooks/useEventSourceManager.ts +++ b/frontend/src/hooks/useEventSourceManager.ts @@ -1,6 +1,6 @@ import { useEffect, useState, useRef } from 'react'; import { applyPatch } from 'rfc6902'; -import type { ExecutionProcessSummary } from 'shared/types'; +import type { ExecutionProcess } from 'shared/types'; import type { ProcessStartPayload } from '@/types/logs'; interface ProcessData { @@ -8,9 +8,9 @@ interface ProcessData { } interface UseEventSourceManagerParams { - processes: ExecutionProcessSummary[]; + processes: ExecutionProcess[]; enabled: boolean; - getEndpoint: (process: ExecutionProcessSummary) => string; + getEndpoint: (process: ExecutionProcess) => string; initialData?: any; } diff --git a/frontend/src/hooks/useProcessesLogs.ts b/frontend/src/hooks/useProcessesLogs.ts index ebfe81e3..25406593 100644 --- a/frontend/src/hooks/useProcessesLogs.ts +++ b/frontend/src/hooks/useProcessesLogs.ts @@ -1,6 +1,6 @@ import { useMemo, useCallback } from 'react'; import type { - ExecutionProcessSummary, + ExecutionProcess, NormalizedEntry, PatchType, } from 'shared/types'; @@ -16,10 +16,10 @@ interface UseProcessesLogsResult { const MAX_ENTRIES = 
5000; export const useProcessesLogs = ( - processes: ExecutionProcessSummary[], + processes: ExecutionProcess[], enabled: boolean ): UseProcessesLogsResult => { - const getEndpoint = useCallback((process: ExecutionProcessSummary) => { + const getEndpoint = useCallback((process: ExecutionProcess) => { // Coding agents use normalized logs endpoint, scripts use raw logs endpoint // Both endpoints now return PatchType objects via JSON patches const isCodingAgent = process.run_reason === 'codingagent'; diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 45f64d89..3d2e8ca7 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -15,7 +15,6 @@ import { DirectoryListResponse, EditorType, ExecutionProcess, - ExecutionProcessSummary, GitBranch, Project, CreateProject, @@ -31,6 +30,9 @@ import { UpdateTaskTemplate, UserSystemInfo, GitHubServiceError, + McpServerQuery, + UpdateMcpServersBody, + GetMcpServerResponse, } from 'shared/types'; // Re-export types for convenience @@ -417,11 +419,11 @@ export const attemptsApi = { export const executionProcessesApi = { getExecutionProcesses: async ( attemptId: string - ): Promise => { + ): Promise => { const response = await makeRequest( `/api/execution-processes?task_attempt_id=${attemptId}` ); - return handleApiResponse(response); + return handleApiResponse(response); }, getDetails: async (processId: string): Promise => { @@ -558,24 +560,20 @@ export const templatesApi = { // MCP Servers APIs export const mcpServersApi = { - load: async (executor: string, mcpConfigPath?: string): Promise => { - const params = new URLSearchParams(); - params.set('base_coding_agent', executor); - if (mcpConfigPath) params.set('mcp_config_path', mcpConfigPath); + load: async (query: McpServerQuery): Promise => { + const params = new URLSearchParams(query); const response = await makeRequest(`/api/mcp-config?${params.toString()}`); - return handleApiResponse(response); + return handleApiResponse(response); }, save: async ( 
- executor: string, - mcpConfigPath: string | undefined, - serversConfig: any + query: McpServerQuery, + data: UpdateMcpServersBody ): Promise => { - const params = new URLSearchParams(); - params.set('base_coding_agent', executor); - if (mcpConfigPath) params.set('mcp_config_path', mcpConfigPath); + const params = new URLSearchParams(query); + // params.set('profile', profile); const response = await makeRequest(`/api/mcp-config?${params.toString()}`, { method: 'POST', - body: JSON.stringify(serversConfig), + body: JSON.stringify(data), }); if (!response.ok) { const errorData = await response.json(); @@ -593,3 +591,21 @@ export const mcpServersApi = { } }, }; + +// Profiles API +export const profilesApi = { + load: async (): Promise<{ content: string; path: string }> => { + const response = await makeRequest('/api/profiles'); + return handleApiResponse<{ content: string; path: string }>(response); + }, + save: async (content: string): Promise => { + const response = await makeRequest('/api/profiles', { + method: 'PUT', + body: content, + headers: { + 'Content-Type': 'application/json', + }, + }); + return handleApiResponse(response); + }, +}; diff --git a/frontend/src/lib/mcp-strategies.ts b/frontend/src/lib/mcp-strategies.ts index 73153a82..9d7ad02f 100644 --- a/frontend/src/lib/mcp-strategies.ts +++ b/frontend/src/lib/mcp-strategies.ts @@ -1,233 +1,84 @@ -// Strategy pattern implementation for MCP server configuration handling -// across different base coding agents (Claude, Amp, Gemini, Opencode, Codex) +import { McpConfig } from 'shared/types'; -import { BaseCodingAgent } from 'shared/types'; - -export interface McpConfigStrategy { - // Get the default empty configuration structure for this executor (as JSON string for textarea) - getDefaultConfig(): string; - - // Create the full configuration structure from servers data - createFullConfig(servers: Record): Record; - - // Validate the full configuration structure - validateFullConfig(config: Record): void; - 
- // Extract the servers object from the full configuration for API calls - extractServersForApi(fullConfig: Record): Record; - - // Create the vibe-kanban MCP server configuration for this executor - createVibeKanbanConfig(): Record; - - // Add vibe-kanban configuration to existing config - addVibeKanbanToConfig( - existingConfig: Record, - vibeKanbanConfig: Record - ): Record; -} - -/** - * Standard MCP configuration strategy for Claude, Gemini, etc. - * Uses JSON with top-level "mcpServers" - */ -export class StandardMcpStrategy implements McpConfigStrategy { - getDefaultConfig(): string { - return '{\n "mcpServers": {\n }\n}'; +export class McpConfigStrategyGeneral { + static createFullConfig(cfg: McpConfig): Record { + // create a template with servers filled in at cfg.servers + const fullConfig = JSON.parse(JSON.stringify(cfg.template)); + let current = fullConfig; + for (let i = 0; i < cfg.servers_path.length - 1; i++) { + const key = cfg.servers_path[i]; + if (!current[key]) { + current[key] = {}; + } + current = current[key]; + } + if (cfg.servers_path.length > 0) { + const lastKey = cfg.servers_path[cfg.servers_path.length - 1]; + current[lastKey] = cfg.servers; + } + return fullConfig; } - - createFullConfig(servers: Record): Record { - return { mcpServers: servers }; - } - - validateFullConfig(config: Record): void { - if (!config.mcpServers || typeof config.mcpServers !== 'object') { - throw new Error('Configuration must contain an "mcpServers" object'); + static validateFullConfig( + mcp_config: McpConfig, + full_config: Record + ): void { + // Validate using the schema path + let current = full_config; + for (const key of mcp_config.servers_path) { + current = current?.[key]; + if (current === undefined) { + throw new Error( + `Missing required field at path: ${mcp_config.servers_path.join('.')}` + ); + } + } + if (typeof current !== 'object') { + throw new Error('Servers configuration must be an object'); } } - - extractServersForApi(fullConfig: 
Record): Record { - return fullConfig.mcpServers; - } - - createVibeKanbanConfig(): Record { - return { - command: 'npx', - args: ['-y', 'vibe-kanban', '--mcp'], - }; - } - - addVibeKanbanToConfig( - existingConfig: Record, - vibeKanbanConfig: Record + static extractServersForApi( + mcp_config: McpConfig, + full_config: Record ): Record { - return { - ...existingConfig, - mcpServers: { - ...(existingConfig.mcpServers || {}), - vibe_kanban: vibeKanbanConfig, - }, - }; - } -} - -/** - * AMP-specific MCP configuration strategy - * Uses flat key "amp.mcpServers" in JSON - */ -export class AmpMcpStrategy implements McpConfigStrategy { - getDefaultConfig(): string { - return '{\n "amp.mcpServers": {\n }\n}'; - } - - createFullConfig(servers: Record): Record { - return { 'amp.mcpServers': servers }; - } - - validateFullConfig(config: Record): void { - if ( - !config['amp.mcpServers'] || - typeof config['amp.mcpServers'] !== 'object' - ) { - throw new Error( - 'AMP configuration must contain an "amp.mcpServers" object' - ); + // Extract the servers object based on the path + let current = full_config; + for (const key of mcp_config.servers_path) { + current = current?.[key]; + if (current === undefined) { + throw new Error( + `Missing required field at path: ${mcp_config.servers_path.join('.')}` + ); + } } + return current; } - extractServersForApi(fullConfig: Record): Record { - return fullConfig['amp.mcpServers']; - } - - createVibeKanbanConfig(): Record { - return { - command: 'npx', - args: ['-y', 'vibe-kanban', '--mcp'], - }; - } - - addVibeKanbanToConfig( - existingConfig: Record, - vibeKanbanConfig: Record + static addVibeKanbanToConfig( + mcp_config: McpConfig, + existingConfig: Record ): Record { - return { - ...existingConfig, - 'amp.mcpServers': { - ...(existingConfig['amp.mcpServers'] || {}), - vibe_kanban: vibeKanbanConfig, - }, - }; - } -} + // Clone the existing config to avoid mutations + const updatedConfig = JSON.parse(JSON.stringify(existingConfig)); + 
let current = updatedConfig; -/** - * Opencode (SST Opencode)-specific MCP configuration strategy - * Uses JSON with top-level "mcp" plus $schema - */ -export class OpencodeMcpStrategy implements McpConfigStrategy { - getDefaultConfig(): string { - return '{\n "mcp": {\n }, "$schema": "https://opencode.ai/config.json"\n}'; - } - - createFullConfig(servers: Record): Record { - return { - mcp: servers, - $schema: 'https://opencode.ai/config.json', - }; - } - - validateFullConfig(config: Record): void { - if (!config.mcp || typeof config.mcp !== 'object') { - throw new Error('Configuration must contain an "mcp" object'); + // Navigate to the correct location for servers (all except the last element) + for (let i = 0; i < mcp_config.servers_path.length - 1; i++) { + const key = mcp_config.servers_path[i]; + if (!current[key]) { + current[key] = {}; + } + current = current[key]; } - } - extractServersForApi(fullConfig: Record): Record { - return fullConfig.mcp; - } - - createVibeKanbanConfig(): Record { - return { - type: 'local', - command: ['npx', '-y', 'vibe-kanban', '--mcp'], - enabled: true, - }; - } - - addVibeKanbanToConfig( - existingConfig: Record, - vibeKanbanConfig: Record - ): Record { - return { - ...existingConfig, - mcp: { - ...(existingConfig.mcp || {}), - vibe_kanban: vibeKanbanConfig, - }, - }; - } -} - -/** - * Codex-specific MCP configuration strategy - * Frontend works with JSON using key "mcp_servers"; backend converts to TOML. - */ -export class CodexMcpStrategy implements McpConfigStrategy { - getDefaultConfig(): string { - // Although Codex uses TOML on disk, the frontend textarea is JSON. 
- return '{\n "mcp_servers": {\n }\n}'; - } - - createFullConfig(servers: Record): Record { - return { mcp_servers: servers }; - } - - validateFullConfig(config: Record): void { - if (!config.mcp_servers || typeof config.mcp_servers !== 'object') { - throw new Error('Configuration must contain an "mcp_servers" object'); + // Get or create the servers object at the final path element + const lastKey = mcp_config.servers_path[mcp_config.servers_path.length - 1]; + if (!current[lastKey]) { + current[lastKey] = {}; } - } - extractServersForApi(fullConfig: Record): Record { - return fullConfig.mcp_servers; - } + // Add vibe_kanban server with the config from the schema + current[lastKey]['vibe_kanban'] = mcp_config.vibe_kanban; - createVibeKanbanConfig(): Record { - return { - command: 'npx', - args: ['-y', 'vibe-kanban', '--mcp'], - }; - } - - addVibeKanbanToConfig( - existingConfig: Record, - vibeKanbanConfig: Record - ): Record { - return { - ...existingConfig, - mcp_servers: { - ...(existingConfig.mcp_servers || {}), - vibe_kanban: vibeKanbanConfig, - }, - }; - } -} - -/** - * Factory to get the appropriate MCP strategy for a BaseCodingAgent - */ -export function getMcpStrategyByAgent( - agent: BaseCodingAgent -): McpConfigStrategy { - switch (agent) { - case BaseCodingAgent.AMP: - return new AmpMcpStrategy(); - case BaseCodingAgent.OPENCODE: - return new OpencodeMcpStrategy(); - case BaseCodingAgent.CODEX: - return new CodexMcpStrategy(); - case BaseCodingAgent.CLAUDE_CODE: - case BaseCodingAgent.GEMINI: - default: - return new StandardMcpStrategy(); + return updatedConfig; } } diff --git a/frontend/src/lib/types.ts b/frontend/src/lib/types.ts index d98a5c7f..1074cceb 100644 --- a/frontend/src/lib/types.ts +++ b/frontend/src/lib/types.ts @@ -1,7 +1,7 @@ -import { ExecutionProcess, ExecutionProcessSummary } from 'shared/types'; +import { ExecutionProcess } from 'shared/types'; export type AttemptData = { - processes: ExecutionProcessSummary[]; + processes: 
ExecutionProcess[]; runningProcessDetails: Record; }; diff --git a/frontend/src/pages/McpServers.tsx b/frontend/src/pages/McpServers.tsx index 3cf45353..6ef3097c 100644 --- a/frontend/src/pages/McpServers.tsx +++ b/frontend/src/pages/McpServers.tsx @@ -18,17 +18,18 @@ import { Label } from '@/components/ui/label'; import { Alert, AlertDescription } from '@/components/ui/alert'; import { Textarea } from '@/components/ui/textarea'; import { Loader2 } from 'lucide-react'; -import { AgentProfile } from 'shared/types'; +import { ProfileConfig, McpConfig } from 'shared/types'; import { useUserSystem } from '@/components/config-provider'; import { mcpServersApi } from '../lib/api'; -import { getMcpStrategyByAgent } from '../lib/mcp-strategies'; +import { McpConfigStrategyGeneral } from '../lib/mcp-strategies'; export function McpServers() { const { config, profiles } = useUserSystem(); const [mcpServers, setMcpServers] = useState('{}'); + const [mcpConfig, setMcpConfig] = useState(null); const [mcpError, setMcpError] = useState(null); const [mcpLoading, setMcpLoading] = useState(true); - const [selectedProfile, setSelectedProfile] = useState( + const [selectedProfile, setSelectedProfile] = useState( null ); const [mcpApplying, setMcpApplying] = useState(false); @@ -39,7 +40,9 @@ export function McpServers() { useEffect(() => { if (config?.profile && profiles && !selectedProfile) { // Find the current profile - const currentProfile = profiles.find((p) => p.label === config.profile); + const currentProfile = profiles.find( + (p) => p.label === config.profile.profile + ); if (currentProfile) { setSelectedProfile(currentProfile); } else if (profiles.length > 0) { @@ -51,34 +54,27 @@ export function McpServers() { // Load existing MCP configuration when selected profile changes useEffect(() => { - const loadMcpServersForProfile = async (profile: AgentProfile) => { + const loadMcpServersForProfile = async (profile: ProfileConfig) => { // Reset state when loading 
setMcpLoading(true); setMcpError(null); - // Set default empty config based on agent type using strategy - const strategy = getMcpStrategyByAgent(profile.agent); - const defaultConfig = strategy.getDefaultConfig(); - setMcpServers(defaultConfig); setMcpConfigPath(''); try { // Load MCP servers for the selected profile/agent - const result = await mcpServersApi.load( - profile.agent, - profile.mcp_config_path || undefined + const result = await mcpServersApi.load({ + profile: profile.label, + }); + // Store the McpConfig from backend + setMcpConfig(result.mcp_config); + // Create the full configuration structure using the schema + const fullConfig = McpConfigStrategyGeneral.createFullConfig( + result.mcp_config ); - // Handle new response format with servers and config_path - const data = result || {}; - const servers = data.servers || {}; - const configPath = data.config_path || ''; - - // Create the full configuration structure using strategy - const strategy = getMcpStrategyByAgent(profile.agent); - const fullConfig = strategy.createFullConfig(servers); const configJson = JSON.stringify(fullConfig, null, 2); setMcpServers(configJson); - setMcpConfigPath(configPath); + setMcpConfigPath(result.config_path); } catch (err: any) { if (err?.message && err.message.includes('does not support MCP')) { setMcpError(err.message); @@ -101,12 +97,11 @@ export function McpServers() { setMcpError(null); // Validate JSON on change - if (value.trim() && selectedProfile) { + if (value.trim() && mcpConfig) { try { - const config = JSON.parse(value); - // Validate that the config has the expected structure using strategy - const strategy = getMcpStrategyByAgent(selectedProfile.agent); - strategy.validateFullConfig(config); + const parsedConfig = JSON.parse(value); + // Validate using the schema path from backend + McpConfigStrategyGeneral.validateFullConfig(mcpConfig, parsedConfig); } catch (err) { if (err instanceof SyntaxError) { setMcpError('Invalid JSON format'); @@ -118,20 
+113,16 @@ export function McpServers() { }; const handleConfigureVibeKanban = async () => { - if (!selectedProfile) return; + if (!selectedProfile || !mcpConfig) return; try { // Parse existing configuration const existingConfig = mcpServers.trim() ? JSON.parse(mcpServers) : {}; - // Use strategy to create vibe-kanban configuration - const strategy = getMcpStrategyByAgent(selectedProfile.agent); - const vibeKanbanConfig = strategy.createVibeKanbanConfig(); - - // Add vibe_kanban to the existing configuration using strategy - const updatedConfig = strategy.addVibeKanbanToConfig( - existingConfig, - vibeKanbanConfig + // Add vibe_kanban to the existing configuration using the schema + const updatedConfig = McpConfigStrategyGeneral.addVibeKanbanToConfig( + mcpConfig, + existingConfig ); // Update the textarea with the new configuration @@ -145,7 +136,7 @@ export function McpServers() { }; const handleApplyMcpServers = async () => { - if (!selectedProfile) return; + if (!selectedProfile || !mcpConfig) return; setMcpApplying(true); setMcpError(null); @@ -155,18 +146,18 @@ export function McpServers() { if (mcpServers.trim()) { try { const fullConfig = JSON.parse(mcpServers); - - // Use strategy to validate and extract servers config - const strategy = getMcpStrategyByAgent(selectedProfile.agent); - strategy.validateFullConfig(fullConfig); - - // Extract just the servers object for the API - backend will handle nesting/format - const mcpServersConfig = strategy.extractServersForApi(fullConfig); + McpConfigStrategyGeneral.validateFullConfig(mcpConfig, fullConfig); + const mcpServersConfig = + McpConfigStrategyGeneral.extractServersForApi( + mcpConfig, + fullConfig + ); await mcpServersApi.save( - selectedProfile.agent, - mcpConfigPath || undefined, - mcpServersConfig + { + profile: selectedProfile.label, + }, + { servers: mcpServersConfig } ); // Show success feedback diff --git a/frontend/src/pages/Settings.tsx b/frontend/src/pages/Settings.tsx index 8a523e0b..389e973b 
100644 --- a/frontend/src/pages/Settings.tsx +++ b/frontend/src/pages/Settings.tsx @@ -1,4 +1,4 @@ -import { useCallback, useState } from 'react'; +import { useCallback, useState, useEffect } from 'react'; import { Card, CardContent, @@ -14,18 +14,31 @@ import { SelectTrigger, SelectValue, } from '@/components/ui/select'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '@/components/ui/dropdown-menu'; import { Label } from '@/components/ui/label'; import { Alert, AlertDescription } from '@/components/ui/alert'; import { Checkbox } from '@/components/ui/checkbox'; import { Input } from '@/components/ui/input'; -import { Key, Loader2, Volume2 } from 'lucide-react'; -import { ThemeMode, EditorType, SoundFile } from 'shared/types'; +import { Textarea } from '@/components/ui/textarea'; +import { ChevronDown, Key, Loader2, Volume2 } from 'lucide-react'; +import { + ThemeMode, + EditorType, + SoundFile, + ProfileVariantLabel, +} from 'shared/types'; import { toPrettyCase } from '@/utils/string'; import { useTheme } from '@/components/theme-provider'; import { useUserSystem } from '@/components/config-provider'; import { GitHubLoginDialog } from '@/components/GitHubLoginDialog'; import { TaskTemplateManager } from '@/components/TaskTemplateManager'; +import { profilesApi } from '@/lib/api'; export function Settings() { const { @@ -35,6 +48,7 @@ export function Settings() { loading, updateAndSaveConfig, profiles, + reloadSystem, } = useUserSystem(); const [saving, setSaving] = useState(false); const [error, setError] = useState(null); @@ -42,6 +56,32 @@ export function Settings() { const { setTheme } = useTheme(); const [showGitHubLogin, setShowGitHubLogin] = useState(false); + // Profiles editor state + const [profilesContent, setProfilesContent] = useState(''); + const [profilesPath, setProfilesPath] = useState(''); + const [profilesError, setProfilesError] = useState(null); + const [profilesLoading, setProfilesLoading] 
= useState(false); + const [profilesSaving, setProfilesSaving] = useState(false); + const [profilesSuccess, setProfilesSuccess] = useState(false); + + // Load profiles content on mount + useEffect(() => { + const loadProfiles = async () => { + setProfilesLoading(true); + try { + const result = await profilesApi.load(); + setProfilesContent(result.content); + setProfilesPath(result.path); + } catch (err) { + console.error('Failed to load profiles:', err); + setProfilesError('Failed to load profiles'); + } finally { + setProfilesLoading(false); + } + }; + loadProfiles(); + }, []); + const playSound = async (soundFile: SoundFile) => { const audio = new Audio(`/api/sounds/${soundFile}`); try { @@ -51,6 +91,46 @@ export function Settings() { } }; + const handleProfilesChange = (value: string) => { + setProfilesContent(value); + setProfilesError(null); + + // Validate JSON on change + if (value.trim()) { + try { + const parsed = JSON.parse(value); + // Basic structure validation + if (!parsed.profiles || !Array.isArray(parsed.profiles)) { + setProfilesError('Invalid structure: must have a "profiles" array'); + } + } catch (err) { + if (err instanceof SyntaxError) { + setProfilesError('Invalid JSON format'); + } else { + setProfilesError('Validation error'); + } + } + } + }; + + const handleSaveProfiles = async () => { + setProfilesSaving(true); + setProfilesError(null); + setProfilesSuccess(false); + + try { + await profilesApi.save(profilesContent); + // Reload the system to get the updated profiles + await reloadSystem(); + setProfilesSuccess(true); + setTimeout(() => setProfilesSuccess(false), 3000); + } catch (err: any) { + setProfilesError(err.message || 'Failed to save profiles'); + } finally { + setProfilesSaving(false); + } + }; + const handleSave = async () => { if (!config) return; @@ -198,23 +278,106 @@ export function Settings() {
- +
+ + + {/* Show variant selector if selected profile has variants */} + {(() => { + const selectedProfile = profiles?.find( + (p) => p.label === config.profile?.profile + ); + const hasVariants = + selectedProfile?.variants && + selectedProfile.variants.length > 0; + + if (hasVariants) { + return ( + + + + + + { + const newProfile: ProfileVariantLabel = { + profile: config.profile?.profile || '', + variant: null, + }; + updateConfig({ profile: newProfile }); + }} + className={ + !config.profile?.variant ? 'bg-accent' : '' + } + > + Default + + {selectedProfile.variants.map((variant) => ( + { + const newProfile: ProfileVariantLabel = { + profile: config.profile?.profile || '', + variant: variant.label, + }; + updateConfig({ profile: newProfile }); + }} + className={ + config.profile?.variant === variant.label + ? 'bg-accent' + : '' + } + > + {variant.label} + + ))} + + + ); + } else if (selectedProfile) { + // Show disabled button when profile exists but has no variants + return ( + + ); + } + return null; + })()} +

Choose the default profile to use when creating a task attempt. @@ -517,6 +680,87 @@ export function Settings() { + + + + Agent Profiles + + + Configure coding agent profiles with specific command-line + parameters. + + + + {profilesError && ( + + {profilesError} + + )} + + {profilesSuccess && ( + + + ✓ Profiles saved successfully! + + + )} + +

+
+ +