diff --git a/backend/.sqlx/query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json b/backend/.sqlx/query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json
similarity index 72%
rename from backend/.sqlx/query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json
rename to backend/.sqlx/query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json
index 7734ad6e..8eda52c8 100644
--- a/backend/.sqlx/query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json
+++ b/backend/.sqlx/query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1",
+ "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1",
"describe": {
"columns": [
{
@@ -24,14 +24,19 @@
"type_info": "Text"
},
{
- "name": "created_at!: DateTime",
+ "name": "dev_script",
"ordinal": 4,
"type_info": "Text"
},
{
- "name": "updated_at!: DateTime",
+ "name": "created_at!: DateTime",
"ordinal": 5,
"type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 6,
+ "type_info": "Text"
}
],
"parameters": {
@@ -42,9 +47,10 @@
false,
false,
true,
+ true,
false,
false
]
},
- "hash": "b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0"
+ "hash": "056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3"
}
diff --git a/backend/.sqlx/query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json b/backend/.sqlx/query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json
similarity index 72%
rename from backend/.sqlx/query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json
rename to backend/.sqlx/query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json
index 114ed94b..1c096326 100644
--- a/backend/.sqlx/query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json
+++ b/backend/.sqlx/query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects ORDER BY created_at DESC",
+ "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects ORDER BY created_at DESC",
"describe": {
"columns": [
{
@@ -24,14 +24,19 @@
"type_info": "Text"
},
{
- "name": "created_at!: DateTime",
+ "name": "dev_script",
"ordinal": 4,
"type_info": "Text"
},
{
- "name": "updated_at!: DateTime",
+ "name": "created_at!: DateTime",
"ordinal": 5,
"type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 6,
+ "type_info": "Text"
}
],
"parameters": {
@@ -42,9 +47,10 @@
false,
false,
true,
+ true,
false,
false
]
},
- "hash": "420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf"
+ "hash": "08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee"
}
diff --git a/backend/.sqlx/query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json b/backend/.sqlx/query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json
similarity index 71%
rename from backend/.sqlx/query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json
rename to backend/.sqlx/query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json
index 78393305..42b33397 100644
--- a/backend/.sqlx/query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json
+++ b/backend/.sqlx/query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1 AND id != $2",
+ "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1 AND id != $2",
"describe": {
"columns": [
{
@@ -24,14 +24,19 @@
"type_info": "Text"
},
{
- "name": "created_at!: DateTime",
+ "name": "dev_script",
"ordinal": 4,
"type_info": "Text"
},
{
- "name": "updated_at!: DateTime",
+ "name": "created_at!: DateTime",
"ordinal": 5,
"type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 6,
+ "type_info": "Text"
}
],
"parameters": {
@@ -42,9 +47,10 @@
false,
false,
true,
+ true,
false,
false
]
},
- "hash": "205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff"
+ "hash": "1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078"
}
diff --git a/backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json b/backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json
new file mode 100644
index 00000000..7a6a9594
--- /dev/null
+++ b/backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json
@@ -0,0 +1,104 @@
+{
+ "db_name": "SQLite",
+ "query": "SELECT \n ep.id as \"id!: Uuid\", \n ep.task_attempt_id as \"task_attempt_id!: Uuid\", \n ep.process_type as \"process_type!: ExecutionProcessType\",\n ep.executor_type,\n ep.status as \"status!: ExecutionProcessStatus\",\n ep.command, \n ep.args, \n ep.working_directory, \n ep.stdout, \n ep.stderr, \n ep.exit_code,\n ep.started_at as \"started_at!: DateTime\",\n ep.completed_at as \"completed_at?: DateTime\",\n ep.created_at as \"created_at!: DateTime\", \n ep.updated_at as \"updated_at!: DateTime\"\n FROM execution_processes ep\n JOIN task_attempts ta ON ep.task_attempt_id = ta.id\n JOIN tasks t ON ta.task_id = t.id\n WHERE ep.status = 'running' \n AND ep.process_type = 'devserver'\n AND t.project_id = $1\n ORDER BY ep.created_at ASC",
+ "describe": {
+ "columns": [
+ {
+ "name": "id!: Uuid",
+ "ordinal": 0,
+ "type_info": "Blob"
+ },
+ {
+ "name": "task_attempt_id!: Uuid",
+ "ordinal": 1,
+ "type_info": "Blob"
+ },
+ {
+ "name": "process_type!: ExecutionProcessType",
+ "ordinal": 2,
+ "type_info": "Text"
+ },
+ {
+ "name": "executor_type",
+ "ordinal": 3,
+ "type_info": "Text"
+ },
+ {
+ "name": "status!: ExecutionProcessStatus",
+ "ordinal": 4,
+ "type_info": "Text"
+ },
+ {
+ "name": "command",
+ "ordinal": 5,
+ "type_info": "Text"
+ },
+ {
+ "name": "args",
+ "ordinal": 6,
+ "type_info": "Text"
+ },
+ {
+ "name": "working_directory",
+ "ordinal": 7,
+ "type_info": "Text"
+ },
+ {
+ "name": "stdout",
+ "ordinal": 8,
+ "type_info": "Text"
+ },
+ {
+ "name": "stderr",
+ "ordinal": 9,
+ "type_info": "Text"
+ },
+ {
+ "name": "exit_code",
+ "ordinal": 10,
+ "type_info": "Integer"
+ },
+ {
+ "name": "started_at!: DateTime",
+ "ordinal": 11,
+ "type_info": "Text"
+ },
+ {
+ "name": "completed_at?: DateTime",
+ "ordinal": 12,
+ "type_info": "Text"
+ },
+ {
+ "name": "created_at!: DateTime",
+ "ordinal": 13,
+ "type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 14,
+ "type_info": "Text"
+ }
+ ],
+ "parameters": {
+ "Right": 1
+ },
+ "nullable": [
+ true,
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ false,
+ true,
+ true,
+ true,
+ false,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1"
+}
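This new offline-query entry corresponds to a lookup of running dev server processes scoped to a project. A hedged sketch of the model method it likely backs (method name assumed; the SQL mirrors the query string recorded above, using a runtime `query_as` rather than the checked macro):

```rust
use sqlx::SqlitePool;
use uuid::Uuid;

impl ExecutionProcess {
    // Assumed name and signature; the real method is not part of this diff.
    pub async fn find_running_dev_servers_by_project(
        pool: &SqlitePool,
        project_id: Uuid,
    ) -> Result<Vec<ExecutionProcess>, sqlx::Error> {
        sqlx::query_as::<_, ExecutionProcess>(
            "SELECT ep.* FROM execution_processes ep \
             JOIN task_attempts ta ON ep.task_attempt_id = ta.id \
             JOIN tasks t ON ta.task_id = t.id \
             WHERE ep.status = 'running' AND ep.process_type = 'devserver' \
               AND t.project_id = $1 \
             ORDER BY ep.created_at ASC",
        )
        .bind(project_id)
        .fetch_all(pool)
        .await
    }
}
```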
diff --git a/backend/.sqlx/query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json b/backend/.sqlx/query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json
similarity index 67%
rename from backend/.sqlx/query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json
rename to backend/.sqlx/query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json
index 3de82da1..e3011e96 100644
--- a/backend/.sqlx/query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json
+++ b/backend/.sqlx/query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "UPDATE projects SET name = $2, git_repo_path = $3, setup_script = $4 WHERE id = $1 RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"",
+ "query": "UPDATE projects SET name = $2, git_repo_path = $3, setup_script = $4, dev_script = $5 WHERE id = $1 RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"",
"describe": {
"columns": [
{
@@ -24,27 +24,33 @@
"type_info": "Text"
},
{
- "name": "created_at!: DateTime",
+ "name": "dev_script",
"ordinal": 4,
"type_info": "Text"
},
{
- "name": "updated_at!: DateTime",
+ "name": "created_at!: DateTime",
"ordinal": 5,
"type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 6,
+ "type_info": "Text"
}
],
"parameters": {
- "Right": 4
+ "Right": 5
},
"nullable": [
true,
false,
false,
true,
+ true,
false,
false
]
},
- "hash": "b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc"
+ "hash": "42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93"
}
diff --git a/backend/.sqlx/query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json b/backend/.sqlx/query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json
similarity index 72%
rename from backend/.sqlx/query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json
rename to backend/.sqlx/query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json
index 0ee5c3f0..a54de44d 100644
--- a/backend/.sqlx/query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json
+++ b/backend/.sqlx/query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE id = $1",
+ "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE id = $1",
"describe": {
"columns": [
{
@@ -24,14 +24,19 @@
"type_info": "Text"
},
{
- "name": "created_at!: DateTime",
+ "name": "dev_script",
"ordinal": 4,
"type_info": "Text"
},
{
- "name": "updated_at!: DateTime",
+ "name": "created_at!: DateTime",
"ordinal": 5,
"type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 6,
+ "type_info": "Text"
}
],
"parameters": {
@@ -42,9 +47,10 @@
false,
false,
true,
+ true,
false,
false
]
},
- "hash": "346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07"
+ "hash": "4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92"
}
diff --git a/backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json b/backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json
new file mode 100644
index 00000000..4214e7d9
--- /dev/null
+++ b/backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json
@@ -0,0 +1,92 @@
+{
+ "db_name": "SQLite",
+ "query": "SELECT \n id as \"id!: Uuid\", \n task_attempt_id as \"task_attempt_id!: Uuid\", \n process_type as \"process_type!: ExecutionProcessType\",\n executor_type,\n status as \"status!: ExecutionProcessStatus\",\n command, \n args, \n working_directory, \n exit_code,\n started_at as \"started_at!: DateTime\",\n completed_at as \"completed_at?: DateTime\",\n created_at as \"created_at!: DateTime\", \n updated_at as \"updated_at!: DateTime\"\n FROM execution_processes \n WHERE task_attempt_id = $1 \n ORDER BY created_at ASC",
+ "describe": {
+ "columns": [
+ {
+ "name": "id!: Uuid",
+ "ordinal": 0,
+ "type_info": "Blob"
+ },
+ {
+ "name": "task_attempt_id!: Uuid",
+ "ordinal": 1,
+ "type_info": "Blob"
+ },
+ {
+ "name": "process_type!: ExecutionProcessType",
+ "ordinal": 2,
+ "type_info": "Text"
+ },
+ {
+ "name": "executor_type",
+ "ordinal": 3,
+ "type_info": "Text"
+ },
+ {
+ "name": "status!: ExecutionProcessStatus",
+ "ordinal": 4,
+ "type_info": "Text"
+ },
+ {
+ "name": "command",
+ "ordinal": 5,
+ "type_info": "Text"
+ },
+ {
+ "name": "args",
+ "ordinal": 6,
+ "type_info": "Text"
+ },
+ {
+ "name": "working_directory",
+ "ordinal": 7,
+ "type_info": "Text"
+ },
+ {
+ "name": "exit_code",
+ "ordinal": 8,
+ "type_info": "Integer"
+ },
+ {
+ "name": "started_at!: DateTime",
+ "ordinal": 9,
+ "type_info": "Text"
+ },
+ {
+ "name": "completed_at?: DateTime",
+ "ordinal": 10,
+ "type_info": "Text"
+ },
+ {
+ "name": "created_at!: DateTime",
+ "ordinal": 11,
+ "type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 12,
+ "type_info": "Text"
+ }
+ ],
+ "parameters": {
+ "Right": 1
+ },
+ "nullable": [
+ true,
+ false,
+ false,
+ true,
+ false,
+ false,
+ true,
+ false,
+ true,
+ false,
+ true,
+ false,
+ false
+ ]
+ },
+ "hash": "58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227"
+}
diff --git a/backend/.sqlx/query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json b/backend/.sqlx/query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json
similarity index 67%
rename from backend/.sqlx/query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json
rename to backend/.sqlx/query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json
index faf32688..5943a6bc 100644
--- a/backend/.sqlx/query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json
+++ b/backend/.sqlx/query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json
@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
- "query": "INSERT INTO projects (id, name, git_repo_path, setup_script) VALUES ($1, $2, $3, $4) RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"",
+ "query": "INSERT INTO projects (id, name, git_repo_path, setup_script, dev_script) VALUES ($1, $2, $3, $4, $5) RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"",
"describe": {
"columns": [
{
@@ -24,27 +24,33 @@
"type_info": "Text"
},
{
- "name": "created_at!: DateTime",
+ "name": "dev_script",
"ordinal": 4,
"type_info": "Text"
},
{
- "name": "updated_at!: DateTime",
+ "name": "created_at!: DateTime",
"ordinal": 5,
"type_info": "Text"
+ },
+ {
+ "name": "updated_at!: DateTime",
+ "ordinal": 6,
+ "type_info": "Text"
}
],
"parameters": {
- "Right": 4
+ "Right": 5
},
"nullable": [
true,
false,
false,
true,
+ true,
false,
false
]
},
- "hash": "64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864"
+ "hash": "5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285"
}
diff --git a/backend/migrations/20250625000000_add_dev_script_to_projects.sql b/backend/migrations/20250625000000_add_dev_script_to_projects.sql
new file mode 100644
index 00000000..d2c95d01
--- /dev/null
+++ b/backend/migrations/20250625000000_add_dev_script_to_projects.sql
@@ -0,0 +1,4 @@
+PRAGMA foreign_keys = ON;
+
+-- Add dev_script column to projects table
+ALTER TABLE projects ADD COLUMN dev_script TEXT DEFAULT '';
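For context, a sketch (not part of the diff) of the `Project` model shape the updated queries imply: `dev_script` appears at ordinal 4 and is marked nullable in the regenerated metadata above, so `Option<String>` is the natural Rust type.

```rust
use chrono::{DateTime, Utc};
use uuid::Uuid;

// Hypothetical shape of the Project record after this migration; field names follow
// the SELECT lists in the .sqlx metadata, exact derives and types are assumptions.
#[derive(Debug, Clone, sqlx::FromRow, serde::Serialize, serde::Deserialize)]
pub struct Project {
    pub id: Uuid,
    pub name: String,
    pub git_repo_path: String,
    pub setup_script: Option<String>,
    pub dev_script: Option<String>, // new column, nullable per the query metadata
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
```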
diff --git a/backend/src/bin/generate_types.rs b/backend/src/bin/generate_types.rs
index a8ac3c93..ddbddf59 100644
--- a/backend/src/bin/generate_types.rs
+++ b/backend/src/bin/generate_types.rs
@@ -103,6 +103,7 @@ fn main() {
vibe_kanban::models::task_attempt::WorktreeDiff::decl(),
vibe_kanban::models::task_attempt::BranchStatus::decl(),
vibe_kanban::models::execution_process::ExecutionProcess::decl(),
+ vibe_kanban::models::execution_process::ExecutionProcessSummary::decl(),
vibe_kanban::models::execution_process::ExecutionProcessStatus::decl(),
vibe_kanban::models::execution_process::ExecutionProcessType::decl(),
vibe_kanban::models::execution_process::CreateExecutionProcess::decl(),
diff --git a/backend/src/execution_monitor.rs b/backend/src/execution_monitor.rs
index 0321caf7..f52268e7 100644
--- a/backend/src/execution_monitor.rs
+++ b/backend/src/execution_monitor.rs
@@ -218,84 +218,100 @@ pub async fn execution_monitor(app_state: AppState) {
}
}
- // Check for orphaned task attempts AFTER handling completions
+ // Check for orphaned execution processes AFTER handling completions
// Add a small delay to ensure completed processes are properly handled first
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
- let running_process_ids =
- match TaskAttemptActivity::find_processes_with_latest_running_status(&app_state.db_pool)
+ let running_processes = match ExecutionProcess::find_running(&app_state.db_pool).await {
+ Ok(processes) => processes,
+ Err(e) => {
+ tracing::error!("Failed to query running execution processes: {}", e);
+ continue;
+ }
+ };
+
+ for process in running_processes {
+ // Additional check: if the process was recently updated, skip it
+ // This prevents race conditions with recent completions
+ let now = chrono::Utc::now();
+ let time_since_update = now - process.updated_at;
+ if time_since_update.num_seconds() < 10 {
+ // Process was updated within last 10 seconds, likely just completed
+ tracing::debug!(
+ "Skipping recently updated process {} (updated {} seconds ago)",
+ process.id,
+ time_since_update.num_seconds()
+ );
+ continue;
+ }
+
+ // Check if this process is not actually running in the app state
+ if !app_state
+ .has_running_execution(process.task_attempt_id)
.await
{
- Ok(processes) => processes,
- Err(e) => {
- tracing::error!("Failed to query running attempts: {}", e);
- continue;
- }
- };
+ // This is truly an orphaned execution process - mark it as failed
+ tracing::info!(
+ "Found orphaned execution process {} for task attempt {}",
+ process.id,
+ process.task_attempt_id
+ );
- for process_id in running_process_ids {
- // Get the execution process to find the task attempt ID
- let task_attempt_id =
- match ExecutionProcess::find_by_id(&app_state.db_pool, process_id).await {
- Ok(Some(process)) => {
- // Additional check: if the process was recently updated, skip it
- // This prevents race conditions with recent completions
- let now = chrono::Utc::now();
- let time_since_update = now - process.updated_at;
- if time_since_update.num_seconds() < 10 {
- // Process was updated within last 10 seconds, likely just completed
- tracing::debug!(
- "Skipping recently updated process {} (updated {} seconds ago)",
- process_id,
- time_since_update.num_seconds()
- );
- continue;
- }
- process.task_attempt_id
- }
- Ok(None) => {
- tracing::error!("Execution process {} not found", process_id);
- continue;
- }
- Err(e) => {
- tracing::error!("Failed to fetch execution process {}: {}", process_id, e);
- continue;
- }
- };
-
- // Double-check that this task attempt is not currently running and hasn't just completed
- if !app_state.has_running_execution(task_attempt_id).await {
- // This is truly an orphaned task attempt - mark it as failed
- let activity_id = Uuid::new_v4();
- let create_activity = CreateTaskAttemptActivity {
- execution_process_id: process_id,
- status: Some(TaskAttemptStatus::ExecutorFailed),
- note: Some("Execution lost (server restart or crash)".to_string()),
- };
-
- if let Err(e) = TaskAttemptActivity::create(
+ // Update the execution process status first
+ if let Err(e) = ExecutionProcess::update_completion(
&app_state.db_pool,
- &create_activity,
- activity_id,
- TaskAttemptStatus::ExecutorFailed,
+ process.id,
+ ExecutionProcessStatus::Failed,
+ None, // No exit code for orphaned processes
)
.await
{
tracing::error!(
- "Failed to create failed activity for orphaned process: {}",
+ "Failed to update orphaned execution process {} status: {}",
+ process.id,
e
);
- } else {
- tracing::info!("Marked orphaned execution process {} as failed", process_id);
+ continue;
+ }
- // Get task attempt and task to access task_id and project_id for status update
+ // Create task attempt activity for non-dev server processes
+ if process.process_type != ExecutionProcessType::DevServer {
+ let activity_id = Uuid::new_v4();
+ let create_activity = CreateTaskAttemptActivity {
+ execution_process_id: process.id,
+ status: Some(TaskAttemptStatus::ExecutorFailed),
+ note: Some("Execution lost (server restart or crash)".to_string()),
+ };
+
+ if let Err(e) = TaskAttemptActivity::create(
+ &app_state.db_pool,
+ &create_activity,
+ activity_id,
+ TaskAttemptStatus::ExecutorFailed,
+ )
+ .await
+ {
+ tracing::error!(
+ "Failed to create failed activity for orphaned process: {}",
+ e
+ );
+ continue;
+ }
+ }
+
+ tracing::info!("Marked orphaned execution process {} as failed", process.id);
+
+ // Update task status to InReview for coding agent and setup script failures
+ if matches!(
+ process.process_type,
+ ExecutionProcessType::CodingAgent | ExecutionProcessType::SetupScript
+ ) {
if let Ok(Some(task_attempt)) =
- TaskAttempt::find_by_id(&app_state.db_pool, task_attempt_id).await
+ TaskAttempt::find_by_id(&app_state.db_pool, process.task_attempt_id).await
{
if let Ok(Some(task)) =
Task::find_by_id(&app_state.db_pool, task_attempt.task_id).await
{
- // Update task status to InReview
if let Err(e) = Task::update_status(
&app_state.db_pool,
task.id,
@@ -518,11 +534,11 @@ async fn handle_coding_agent_completion(
/// Handle dev server completion (future functionality)
async fn handle_dev_server_completion(
- _app_state: &AppState,
+ app_state: &AppState,
task_attempt_id: Uuid,
- _execution_process_id: Uuid,
+ execution_process_id: Uuid,
_execution_process: ExecutionProcess,
- _success: bool,
+ success: bool,
exit_code: Option<i64>,
) {
let exit_text = if let Some(code) = exit_code {
@@ -537,6 +553,24 @@ async fn handle_dev_server_completion(
exit_text
);
- // Dev servers might restart automatically or have different completion semantics
- // For now, just log the completion
+ // Update execution process status instead of creating activity
+ let process_status = if success {
+ ExecutionProcessStatus::Completed
+ } else {
+ ExecutionProcessStatus::Failed
+ };
+
+ if let Err(e) = ExecutionProcess::update_completion(
+ &app_state.db_pool,
+ execution_process_id,
+ process_status,
+ exit_code,
+ )
+ .await
+ {
+ tracing::error!(
+ "Failed to update dev server execution process status: {}",
+ e
+ );
+ }
}
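The monitor loop above now leans on two `ExecutionProcess` helpers that are not included in this diff. A hedged sketch of what their signatures plausibly look like, with names taken from the call sites and bodies assumed:

```rust
use sqlx::SqlitePool;
use uuid::Uuid;

impl ExecutionProcess {
    /// All execution processes still marked 'running' in the database.
    pub async fn find_running(pool: &SqlitePool) -> Result<Vec<ExecutionProcess>, sqlx::Error> {
        sqlx::query_as::<_, ExecutionProcess>(
            "SELECT * FROM execution_processes WHERE status = 'running' ORDER BY created_at ASC",
        )
        .fetch_all(pool)
        .await
    }

    /// Mark a process as finished with the given status and optional exit code.
    pub async fn update_completion(
        pool: &SqlitePool,
        id: Uuid,
        status: ExecutionProcessStatus,
        exit_code: Option<i64>,
    ) -> Result<(), sqlx::Error> {
        // Assumes ExecutionProcessStatus derives sqlx::Type so it can be bound directly.
        sqlx::query(
            "UPDATE execution_processes SET status = $1, exit_code = $2, \
             completed_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP WHERE id = $3",
        )
        .bind(status)
        .bind(exit_code)
        .bind(id)
        .execute(pool)
        .await?;
        Ok(())
    }
}
```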
diff --git a/backend/src/executor.rs b/backend/src/executor.rs
index e2828f9f..57504320 100644
--- a/backend/src/executor.rs
+++ b/backend/src/executor.rs
@@ -91,6 +91,7 @@ pub trait Executor: Send + Sync {
#[derive(Debug, Clone)]
pub enum ExecutorType {
SetupScript(String),
+ DevServer(String),
CodingAgent(ExecutorConfig),
FollowUpCodingAgent {
config: ExecutorConfig,
diff --git a/backend/src/executors/dev_server.rs b/backend/src/executors/dev_server.rs
new file mode 100644
index 00000000..803a7ba3
--- /dev/null
+++ b/backend/src/executors/dev_server.rs
@@ -0,0 +1,44 @@
+use async_trait::async_trait;
+use tokio::process::{Child, Command};
+use uuid::Uuid;
+
+use crate::executor::{Executor, ExecutorError};
+use crate::models::project::Project;
+use crate::models::task::Task;
+
+/// Executor for running project dev server scripts
+pub struct DevServerExecutor {
+ pub script: String,
+}
+
+#[async_trait]
+impl Executor for DevServerExecutor {
+ async fn spawn(
+ &self,
+ pool: &sqlx::SqlitePool,
+ task_id: Uuid,
+ worktree_path: &str,
+ ) -> Result<Child, ExecutorError> {
+ // Validate the task and project exist
+ let task = Task::find_by_id(pool, task_id)
+ .await?
+ .ok_or(ExecutorError::TaskNotFound)?;
+
+ let _project = Project::find_by_id(pool, task.project_id)
+ .await?
+ .ok_or(ExecutorError::TaskNotFound)?; // Reuse TaskNotFound for simplicity
+
+ let child = Command::new("bash")
+ .kill_on_drop(true)
+ .stdout(std::process::Stdio::piped())
+ .stderr(std::process::Stdio::piped())
+ .arg("-c")
+ .arg(&self.script)
+ .current_dir(worktree_path)
+ .process_group(0)
+ .spawn()
+ .map_err(ExecutorError::SpawnFailed)?;
+
+ Ok(child)
+ }
+}
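For orientation, a hedged sketch of how the new `ExecutorType::DevServer(String)` variant from executor.rs might be resolved into this executor. The real dispatch site is not in the diff, so the function and its shape are assumptions:

```rust
use crate::executor::{Executor, ExecutorType};
use crate::executors::DevServerExecutor;

// Hypothetical factory: the DevServer variant simply wraps the project's dev_script.
fn create_executor(executor_type: &ExecutorType) -> Option<Box<dyn Executor>> {
    match executor_type {
        ExecutorType::DevServer(script) => Some(Box::new(DevServerExecutor {
            script: script.clone(),
        })),
        // SetupScript, CodingAgent, and FollowUpCodingAgent construction elided here;
        // they are handled elsewhere in the codebase.
        _ => None,
    }
}
```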
diff --git a/backend/src/executors/mod.rs b/backend/src/executors/mod.rs
index 5367400f..2cc76479 100644
--- a/backend/src/executors/mod.rs
+++ b/backend/src/executors/mod.rs
@@ -1,9 +1,11 @@
pub mod amp;
pub mod claude;
+pub mod dev_server;
pub mod echo;
pub mod setup_script;
pub use amp::{AmpExecutor, AmpFollowupExecutor};
pub use claude::{ClaudeExecutor, ClaudeFollowupExecutor};
+pub use dev_server::DevServerExecutor;
pub use echo::EchoExecutor;
pub use setup_script::SetupScriptExecutor;
diff --git a/backend/src/executors/setup_script.rs b/backend/src/executors/setup_script.rs
index 7f854352..dc2812d7 100644
--- a/backend/src/executors/setup_script.rs
+++ b/backend/src/executors/setup_script.rs
@@ -35,6 +35,7 @@ impl Executor for SetupScriptExecutor {
.arg("-c")
.arg(&self.script)
.current_dir(worktree_path)
+ .process_group(0)
.spawn()
.map_err(ExecutorError::SpawnFailed)?;
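Both the setup script and the new dev server executor now spawn `bash -c` with `.process_group(0)`, so each script runs in its own process group and a stop path can signal the whole tree (the shell plus any servers it launched) rather than just the shell. A minimal sketch of that kill path, assuming the `nix` crate; the repository's actual stop logic is not shown in this diff:

```rust
use nix::sys::signal::{killpg, Signal};
use nix::unistd::Pid;

// Because the child was started with process_group(0), its pid equals its pgid,
// so signalling the group terminates the script and all of its descendants.
fn kill_script_process_group(child_pid: u32) -> nix::Result<()> {
    killpg(Pid::from_raw(child_pid as i32), Signal::SIGTERM)
}
```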
diff --git a/backend/src/main.rs b/backend/src/main.rs
index 17a9f2bf..7a9c2376 100644
--- a/backend/src/main.rs
+++ b/backend/src/main.rs
@@ -83,14 +83,23 @@ async fn serve_file(path: &str) -> impl IntoResponse {
}
}
-async fn serve_sound_file(axum::extract::Path(filename): axum::extract::Path<String>) -> impl IntoResponse {
- use tokio::fs;
+async fn serve_sound_file(
+ axum::extract::Path(filename): axum::extract::Path<String>,
+) -> impl IntoResponse {
use std::path::Path;
+ use tokio::fs;
// Validate filename contains only expected sound files
- let valid_sounds = ["abstract-sound1.mp3", "abstract-sound2.mp3", "abstract-sound3.mp3",
- "abstract-sound4.mp3", "cow-mooing.mp3", "phone-vibration.mp3", "rooster.mp3"];
-
+ let valid_sounds = [
+ "abstract-sound1.mp3",
+ "abstract-sound2.mp3",
+ "abstract-sound3.mp3",
+ "abstract-sound4.mp3",
+ "cow-mooing.mp3",
+ "phone-vibration.mp3",
+ "rooster.mp3",
+ ];
+
if !valid_sounds.contains(&filename.as_str()) {
return Response::builder()
.status(StatusCode::NOT_FOUND)
@@ -99,21 +108,17 @@ async fn serve_sound_file(axum::extract::Path(filename): axum::extract::Path<String>) -> impl IntoResponse {
- Response::builder()
- .status(StatusCode::OK)
- .header(header::CONTENT_TYPE, HeaderValue::from_static("audio/mpeg"))
- .body(Body::from(content))
- .unwrap()
- }
- Err(_) => {
- Response::builder()
- .status(StatusCode::NOT_FOUND)
- .body(Body::from("Sound file not found"))
- .unwrap()
- }
+ Ok(content) => Response::builder()
+ .status(StatusCode::OK)
+ .header(header::CONTENT_TYPE, HeaderValue::from_static("audio/mpeg"))
+ .body(Body::from(content))
+ .unwrap(),
+ Err(_) => Response::builder()
+ .status(StatusCode::NOT_FOUND)
+ .body(Body::from("Sound file not found"))
+ .unwrap(),
}
}
diff --git a/backend/src/models/execution_process.rs b/backend/src/models/execution_process.rs
index 508d8c39..95a1a935 100644
--- a/backend/src/models/execution_process.rs
+++ b/backend/src/models/execution_process.rs
@@ -86,6 +86,24 @@ pub struct UpdateExecutionProcess {
pub completed_at: Option<DateTime<Utc>>,
}
+#[derive(Debug, Clone, FromRow, Serialize, Deserialize, TS)]
+#[ts(export)]
+pub struct ExecutionProcessSummary {
+ pub id: Uuid,
+ pub task_attempt_id: Uuid,
+ pub process_type: ExecutionProcessType,
+ pub executor_type: Option<String>, // "echo", "claude", "amp", etc. - only for CodingAgent processes
+ pub status: ExecutionProcessStatus,
+ pub command: String,
+ pub args: Option<String>, // JSON array of arguments
+ pub working_directory: String,
+ pub exit_code: Option<i64>,
+ pub started_at: DateTime<Utc>,
+ pub completed_at: Option<DateTime<Utc>>,
+ pub created_at: DateTime<Utc>,
+ pub updated_at: DateTime<Utc>,
+}
+
impl ExecutionProcess {
/// Find execution process by ID
pub async fn find_by_id(pool: &SqlitePool, id: Uuid) -> Result<Option<Self>, sqlx::Error> {
+
+
+
+
{error && (
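The `ExecutionProcessSummary` added above carries every `ExecutionProcess` column except the potentially large `stdout`/`stderr` blobs, which keeps the per-attempt process list cheap to poll. A hypothetical conversion (not in the diff) makes that relationship explicit, using the field types as reconstructed above:

```rust
// Hypothetical helper: every summary field is present on the full record;
// only stdout and stderr are dropped.
impl From<ExecutionProcess> for ExecutionProcessSummary {
    fn from(p: ExecutionProcess) -> Self {
        Self {
            id: p.id,
            task_attempt_id: p.task_attempt_id,
            process_type: p.process_type,
            executor_type: p.executor_type,
            status: p.status,
            command: p.command,
            args: p.args,
            working_directory: p.working_directory,
            exit_code: p.exit_code,
            started_at: p.started_at,
            completed_at: p.completed_at,
            created_at: p.created_at,
            updated_at: p.updated_at,
        }
    }
}
```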
diff --git a/frontend/src/components/tasks/ExecutionOutputViewer.tsx b/frontend/src/components/tasks/ExecutionOutputViewer.tsx
index 2f32d0f2..2b24a0b1 100644
--- a/frontend/src/components/tasks/ExecutionOutputViewer.tsx
+++ b/frontend/src/components/tasks/ExecutionOutputViewer.tsx
@@ -4,13 +4,30 @@ import { Button } from "@/components/ui/button";
import { Badge } from "@/components/ui/badge";
import { FileText, MessageSquare } from "lucide-react";
import { ConversationViewer } from "./ConversationViewer";
-import type { ExecutionProcess } from "shared/types";
+import type { ExecutionProcess, ExecutionProcessStatus } from "shared/types";
interface ExecutionOutputViewerProps {
executionProcess: ExecutionProcess;
executor?: string;
}
+const getExecutionProcessStatusDisplay = (
+ status: ExecutionProcessStatus
+): { label: string; color: string } => {
+ switch (status) {
+ case "running":
+ return { label: "Running", color: "bg-blue-500" };
+ case "completed":
+ return { label: "Completed", color: "bg-green-500" };
+ case "failed":
+ return { label: "Failed", color: "bg-red-500" };
+ case "killed":
+ return { label: "Stopped", color: "bg-gray-500" };
+ default:
+ return { label: "Unknown", color: "bg-gray-400" };
+ }
+};
+
export function ExecutionOutputViewer({
executionProcess,
executor,
@@ -93,17 +110,34 @@ export function ExecutionOutputViewer({
);
}
+ const statusDisplay = getExecutionProcessStatusDisplay(executionProcess.status);
+
return (
+ {/* Execution process header with status */}
+
+
+
+ {executionProcess.process_type.replace(/([A-Z])/g, ' $1').toLowerCase()}
+
+
+
+
{statusDisplay.label}
+
+ {executor && (
+
+ {executor}
+
+ )}
+
+
+
{/* View mode toggle for executors with valid JSONL */}
{isValidJsonl && hasStdout && (
-
- {executor} output
-
{jsonlFormat && (
{jsonlFormat} format
diff --git a/frontend/src/components/tasks/TaskDetailsPanel.tsx b/frontend/src/components/tasks/TaskDetailsPanel.tsx
index 0441816b..aa0f35fc 100644
--- a/frontend/src/components/tasks/TaskDetailsPanel.tsx
+++ b/frontend/src/components/tasks/TaskDetailsPanel.tsx
@@ -14,12 +14,19 @@ import {
StopCircle,
Send,
AlertCircle,
+ Play,
} from "lucide-react";
import { Button } from "@/components/ui/button";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { Label } from "@/components/ui/label";
import { Chip } from "@/components/ui/chip";
import { Textarea } from "@/components/ui/textarea";
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipProvider,
+ TooltipTrigger,
+} from "@/components/ui/tooltip";
import { ExecutionOutputViewer } from "./ExecutionOutputViewer";
import { EditorSelectionDialog } from "./EditorSelectionDialog";
@@ -44,11 +51,14 @@ import type {
ApiResponse,
TaskWithAttemptStatus,
ExecutionProcess,
+ ExecutionProcessSummary,
EditorType,
+ Project,
} from "shared/types";
interface TaskDetailsPanelProps {
task: TaskWithAttemptStatus | null;
+ project: Project | null;
projectId: string;
isOpen: boolean;
onClose: () => void;
@@ -125,6 +135,7 @@ const getAttemptStatusDisplay = (
export function TaskDetailsPanel({
task,
+ project,
projectId,
isOpen,
onClose,
@@ -135,12 +146,16 @@ export function TaskDetailsPanel({
const [selectedAttempt, setSelectedAttempt] = useState<TaskAttempt | null>(
null
);
- const [attemptActivities, setAttemptActivities] = useState<
- TaskAttemptActivity[]
- >([]);
- const [executionProcesses, setExecutionProcesses] = useState<
- Record<string, ExecutionProcess>
- >({});
+ // Combined attempt data state
+ const [attemptData, setAttemptData] = useState<{
+ activities: TaskAttemptActivity[];
+ processes: ExecutionProcessSummary[];
+ runningProcessDetails: Record<string, ExecutionProcess>;
+ }>({
+ activities: [],
+ processes: [],
+ runningProcessDetails: {},
+ });
const [loading, setLoading] = useState(false);
const [isDescriptionExpanded, setIsDescriptionExpanded] = useState(false);
const [selectedExecutor, setSelectedExecutor] = useState("claude");
@@ -152,26 +167,38 @@ export function TaskDetailsPanel({
const [followUpMessage, setFollowUpMessage] = useState("");
const [isSendingFollowUp, setIsSendingFollowUp] = useState(false);
const [followUpError, setFollowUpError] = useState(null);
-
+ const [isStartingDevServer, setIsStartingDevServer] = useState(false);
+ const [devServerDetails, setDevServerDetails] =
+ useState<ExecutionProcess | null>(null);
+ const [isHoveringDevServer, setIsHoveringDevServer] = useState(false);
+
// Auto-scroll state
const [shouldAutoScroll, setShouldAutoScroll] = useState(true);
const scrollContainerRef = useRef<HTMLDivElement>(null);
const { config } = useConfig();
+ // Find running dev server in current project (across all task attempts)
+ const runningDevServer = useMemo(() => {
+ return attemptData.processes.find(
+ (process) =>
+ process.process_type === "devserver" && process.status === "running"
+ );
+ }, [attemptData.processes]);
+
// Handle ESC key locally to prevent global navigation
useEffect(() => {
if (!isOpen) return;
const handleKeyDown = (event: KeyboardEvent) => {
- if (event.key === 'Escape') {
+ if (event.key === "Escape") {
event.preventDefault();
event.stopPropagation();
onClose();
}
};
- document.addEventListener('keydown', handleKeyDown, true); // Use capture phase
- return () => document.removeEventListener('keydown', handleKeyDown, true);
+ document.addEventListener("keydown", handleKeyDown, true); // Use capture phase
+ return () => document.removeEventListener("keydown", handleKeyDown, true);
}, [isOpen, onClose]);
// Available executors
@@ -182,16 +209,15 @@ export function TaskDetailsPanel({
];
// Check if any execution process is currently running
- // We need to check the latest activity for each execution process
const isAttemptRunning = useMemo(() => {
- if (!selectedAttempt || attemptActivities.length === 0 || isStopping) {
+ if (!selectedAttempt || attemptData.activities.length === 0 || isStopping) {
return false;
}
// Group activities by execution_process_id and get the latest one for each
const latestActivitiesByProcess = new Map();
- attemptActivities.forEach((activity) => {
+ attemptData.activities.forEach((activity) => {
const existing = latestActivitiesByProcess.get(
activity.execution_process_id
);
@@ -209,21 +235,31 @@ export function TaskDetailsPanel({
activity.status === "setuprunning" ||
activity.status === "executorrunning"
);
- }, [selectedAttempt, attemptActivities, isStopping]);
+ }, [selectedAttempt, attemptData.activities, isStopping]);
// Check if follow-up should be enabled
const canSendFollowUp = useMemo(() => {
- if (!selectedAttempt || attemptActivities.length === 0 || isAttemptRunning || isSendingFollowUp) {
+ if (
+ !selectedAttempt ||
+ attemptData.activities.length === 0 ||
+ isAttemptRunning ||
+ isSendingFollowUp
+ ) {
return false;
}
// Need at least one completed coding agent execution
- const codingAgentActivities = attemptActivities.filter(
+ const codingAgentActivities = attemptData.activities.filter(
(activity) => activity.status === "executorcomplete"
);
return codingAgentActivities.length > 0;
- }, [selectedAttempt, attemptActivities, isAttemptRunning, isSendingFollowUp]);
+ }, [
+ selectedAttempt,
+ attemptData.activities,
+ isAttemptRunning,
+ isSendingFollowUp,
+ ]);
// Polling for updates when attempt is running
useEffect(() => {
@@ -231,13 +267,52 @@ export function TaskDetailsPanel({
const interval = setInterval(() => {
if (selectedAttempt) {
- fetchAttemptActivities(selectedAttempt.id, true);
+ fetchAttemptData(selectedAttempt.id, true);
}
}, 2000);
return () => clearInterval(interval);
}, [isAttemptRunning, task?.id, selectedAttempt?.id]);
+ // Fetch dev server details when hovering
+ const fetchDevServerDetails = async () => {
+ if (!runningDevServer || !task || !selectedAttempt) return;
+
+ try {
+ const response = await makeRequest(
+ `/api/projects/${projectId}/execution-processes/${runningDevServer.id}`
+ );
+ if (response.ok) {
+ const result: ApiResponse<ExecutionProcess> = await response.json();
+ if (result.success && result.data) {
+ setDevServerDetails(result.data);
+ }
+ }
+ } catch (err) {
+ console.error("Failed to fetch dev server details:", err);
+ }
+ };
+
+ // Poll dev server details while hovering
+ useEffect(() => {
+ if (!isHoveringDevServer || !runningDevServer) {
+ setDevServerDetails(null);
+ return;
+ }
+
+ // Fetch immediately
+ fetchDevServerDetails();
+
+ // Then poll every 2 seconds
+ const interval = setInterval(fetchDevServerDetails, 2000);
+ return () => clearInterval(interval);
+ }, [
+ isHoveringDevServer,
+ runningDevServer?.id,
+ task?.id,
+ selectedAttempt?.id,
+ ]);
+
// Set default executor from config
useEffect(() => {
if (config) {
@@ -254,16 +329,18 @@ export function TaskDetailsPanel({
// Auto-scroll to bottom when activities or execution processes change
useEffect(() => {
if (shouldAutoScroll && scrollContainerRef.current) {
- scrollContainerRef.current.scrollTop = scrollContainerRef.current.scrollHeight;
+ scrollContainerRef.current.scrollTop =
+ scrollContainerRef.current.scrollHeight;
}
- }, [attemptActivities, executionProcesses, shouldAutoScroll]);
+ }, [attemptData.activities, attemptData.processes, shouldAutoScroll]);
// Handle scroll events to detect manual scrolling
const handleScroll = useCallback(() => {
if (scrollContainerRef.current) {
- const { scrollTop, scrollHeight, clientHeight } = scrollContainerRef.current;
+ const { scrollTop, scrollHeight, clientHeight } =
+ scrollContainerRef.current;
const isAtBottom = scrollTop + clientHeight >= scrollHeight - 5; // 5px tolerance
-
+
if (isAtBottom && !shouldAutoScroll) {
setShouldAutoScroll(true);
} else if (!isAtBottom && shouldAutoScroll) {
@@ -294,12 +371,15 @@ export function TaskDetailsPanel({
: latest
);
setSelectedAttempt(latestAttempt);
- fetchAttemptActivities(latestAttempt.id);
+ fetchAttemptData(latestAttempt.id);
} else {
// Clear state when no attempts exist
setSelectedAttempt(null);
- setAttemptActivities([]);
- setExecutionProcesses({});
+ setAttemptData({
+ activities: [],
+ processes: [],
+ runningProcessDetails: {},
+ });
}
}
}
@@ -310,59 +390,74 @@ export function TaskDetailsPanel({
}
};
- const fetchAttemptActivities = async (
+ const fetchAttemptData = async (
attemptId: string,
_isBackgroundUpdate = false
) => {
if (!task) return;
try {
- const response = await makeRequest(
- `/api/projects/${projectId}/tasks/${task.id}/attempts/${attemptId}/activities`
- );
+ const [activitiesResponse, processesResponse] = await Promise.all([
+ makeRequest(
+ `/api/projects/${projectId}/tasks/${task.id}/attempts/${attemptId}/activities`
+ ),
+ makeRequest(
+ `/api/projects/${projectId}/tasks/${task.id}/attempts/${attemptId}/execution-processes`
+ ),
+ ]);
- if (response.ok) {
- const result: ApiResponse<TaskAttemptActivity[]> =
- await response.json();
- if (result.success && result.data) {
- setAttemptActivities(result.data);
+ if (activitiesResponse.ok && processesResponse.ok) {
+ const activitiesResult: ApiResponse<TaskAttemptActivity[]> =
+ await activitiesResponse.json();
+ const processesResult: ApiResponse<ExecutionProcessSummary[]> =
+ await processesResponse.json();
- // Fetch execution processes for running activities
- const runningActivities = result.data.filter(
+ if (
+ activitiesResult.success &&
+ processesResult.success &&
+ activitiesResult.data &&
+ processesResult.data
+ ) {
+ // Find running activities that need detailed execution info
+ const runningActivities = activitiesResult.data.filter(
(activity) =>
activity.status === "setuprunning" ||
activity.status === "executorrunning"
);
+ // Fetch detailed execution info for running processes
+ const runningProcessDetails: Record<string, ExecutionProcess> = {};
for (const activity of runningActivities) {
- fetchExecutionProcess(activity.execution_process_id);
+ try {
+ const detailResponse = await makeRequest(
+ `/api/projects/${projectId}/execution-processes/${activity.execution_process_id}`
+ );
+ if (detailResponse.ok) {
+ const detailResult: ApiResponse<ExecutionProcess> =
+ await detailResponse.json();
+ if (detailResult.success && detailResult.data) {
+ runningProcessDetails[activity.execution_process_id] =
+ detailResult.data;
+ }
+ }
+ } catch (err) {
+ console.error(
+ `Failed to fetch execution process ${activity.execution_process_id}:`,
+ err
+ );
+ }
}
+
+ // Update all attempt data at once
+ setAttemptData({
+ activities: activitiesResult.data,
+ processes: processesResult.data,
+ runningProcessDetails,
+ });
}
}
} catch (err) {
- console.error("Failed to fetch attempt activities:", err);
- }
- };
-
- const fetchExecutionProcess = async (processId: string) => {
- if (!task) return;
-
- try {
- const response = await makeRequest(
- `/api/projects/${projectId}/execution-processes/${processId}`
- );
-
- if (response.ok) {
- const result: ApiResponse<ExecutionProcess> = await response.json();
- if (result.success && result.data) {
- setExecutionProcesses((prev) => ({
- ...prev,
- [processId]: result.data!,
- }));
- }
- }
- } catch (err) {
- console.error("Failed to fetch execution process:", err);
+ console.error("Failed to fetch attempt data:", err);
}
};
@@ -370,7 +465,7 @@ export function TaskDetailsPanel({
const attempt = taskAttempts.find((a) => a.id === attemptId);
if (attempt) {
setSelectedAttempt(attempt);
- fetchAttemptActivities(attempt.id);
+ fetchAttemptData(attempt.id);
}
};
@@ -401,6 +496,70 @@ export function TaskDetailsPanel({
}
};
+ const startDevServer = async () => {
+ if (!task || !selectedAttempt || !project?.dev_script) return;
+
+ setIsStartingDevServer(true);
+
+ try {
+ const response = await makeRequest(
+ `/api/projects/${projectId}/tasks/${task.id}/attempts/${selectedAttempt.id}/start-dev-server`,
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ if (!response.ok) {
+ throw new Error("Failed to start dev server");
+ }
+
+ const data: ApiResponse = await response.json();
+
+ if (!data.success) {
+ throw new Error(data.message || "Failed to start dev server");
+ }
+
+ // Refresh activities to show the new dev server process
+ fetchAttemptData(selectedAttempt.id);
+ } catch (err) {
+ console.error("Failed to start dev server:", err);
+ } finally {
+ setIsStartingDevServer(false);
+ }
+ };
+
+ const stopDevServer = async () => {
+ if (!task || !selectedAttempt || !runningDevServer) return;
+
+ setIsStartingDevServer(true);
+
+ try {
+ const response = await makeRequest(
+ `/api/projects/${projectId}/tasks/${task.id}/attempts/${selectedAttempt.id}/execution-processes/${runningDevServer.id}/stop`,
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ }
+ );
+
+ if (!response.ok) {
+ throw new Error("Failed to stop dev server");
+ }
+
+ // Refresh activities to show the stopped dev server
+ fetchAttemptData(selectedAttempt.id);
+ } catch (err) {
+ console.error("Failed to stop dev server:", err);
+ } finally {
+ setIsStartingDevServer(false);
+ }
+ };
+
const createNewAttempt = async (executor?: string) => {
if (!task) return;
@@ -443,13 +602,11 @@ export function TaskDetailsPanel({
);
if (response.ok) {
- // Clear cached execution processes since they should be stopped
- setExecutionProcesses({});
// Refresh activities to show updated status
- await fetchAttemptActivities(selectedAttempt.id);
+ await fetchAttemptData(selectedAttempt.id);
// Wait a bit for the backend to finish updating
setTimeout(() => {
- fetchAttemptActivities(selectedAttempt.id);
+ fetchAttemptData(selectedAttempt.id);
}, 1000);
}
} catch (err) {
@@ -494,13 +651,21 @@ export function TaskDetailsPanel({
// Clear the message
setFollowUpMessage("");
// Refresh activities to show the new follow-up execution
- fetchAttemptActivities(selectedAttempt.id);
+ fetchAttemptData(selectedAttempt.id);
} else {
const errorText = await response.text();
- setFollowUpError(`Failed to start follow-up execution: ${errorText || response.statusText}`);
+ setFollowUpError(
+ `Failed to start follow-up execution: ${
+ errorText || response.statusText
+ }`
+ );
}
} catch (err) {
- setFollowUpError(`Failed to send follow-up: ${err instanceof Error ? err.message : 'Unknown error'}`);
+ setFollowUpError(
+ `Failed to send follow-up: ${
+ err instanceof Error ? err.message : "Unknown error"
+ }`
+ );
} finally {
setIsSendingFollowUp(false);
}
@@ -618,9 +783,6 @@ export function TaskDetailsPanel({
selectedAttempt.created_at
).toLocaleTimeString()}
-
- Worktree: {selectedAttempt.worktree_path}
-
)}
@@ -721,6 +883,83 @@ export function TaskDetailsPanel({
{isStopping ? "Stopping..." : "Stop"}
)}
+
+
+
+ setIsHoveringDevServer(true)}
+ onMouseLeave={() => setIsHoveringDevServer(false)}
+ >
+
+
+
+
+ {!project?.dev_script ? (
+
+ Configure a dev server command in project
+ settings
+
+ ) : runningDevServer && devServerDetails ? (
+
+
+ Dev Server Logs (Last 10 lines):
+
+
+ {(() => {
+ const stdout =
+ devServerDetails.stdout || "";
+ const stderr =
+ devServerDetails.stderr || "";
+ const allOutput =
+ stdout + (stderr ? "\n" + stderr : "");
+ const lines = allOutput
+ .split("\n")
+ .filter((line) => line.trim());
+ const lastLines = lines.slice(-10);
+ return lastLines.length > 0
+ ? lastLines.join("\n")
+ : "No output yet...";
+ })()}
+
+
+ ) : null}
+
+
+
)}
- {attemptActivities.slice().map((activity) => (
+ {attemptData.activities.slice().map((activity) => (
{/* Compact activity message */}
@@ -825,22 +1064,22 @@ export function TaskDetailsPanel({
{/* Show stdio output for running processes */}
{(activity.status === "setuprunning" ||
activity.status === "executorrunning") &&
- executionProcesses[
+ attemptData.runningProcessDetails[
activity.execution_process_id
] && (
{
- if ((e.metaKey || e.ctrlKey) && e.key === 'Enter') {
+ if ((e.metaKey || e.ctrlKey) && e.key === "Enter") {
e.preventDefault();
- if (canSendFollowUp && followUpMessage.trim() && !isSendingFollowUp) {
+ if (
+ canSendFollowUp &&
+ followUpMessage.trim() &&
+ !isSendingFollowUp
+ ) {
handleSendFollowUp();
}
}
@@ -920,7 +1163,11 @@ export function TaskDetailsPanel({
/>