From fd0cdff0e42cf265b7dd546e94a2eb05ed490533 Mon Sep 17 00:00:00 2001
From: Louis Knight-Webb
Date: Tue, 24 Jun 2025 16:50:58 +0100
Subject: [PATCH] Squashed commit of the following:

commit ca21aa40163902dfb20582d6dced8c884b4b0119
Author: Louis Knight-Webb
Date:   Tue Jun 24 16:50:43 2025 +0100

    Fixes

commit 75c982209a71704d0df15982b9ac0aca87aa68de
Author: Louis Knight-Webb
Date:   Tue Jun 24 16:35:58 2025 +0100

    Improve process killing

commit f58fd3b8a315880cc940d7e59719d23428c72e92
Author: Louis Knight-Webb
Date:   Tue Jun 24 16:23:59 2025 +0100

    WIP

commit 7a6cd4772e15a5df0d760fe79776979c3ba206e8
Author: Louis Knight-Webb
Date:   Tue Jun 24 12:34:13 2025 +0100

    Fix dev server activity not showing

commit 09eb3095c1850b5f3173b72b6b220811ef68524c
Author: Louis Knight-Webb
Date:   Tue Jun 24 12:27:01 2025 +0100

    Add activity for dev server

commit 73db9a20312a8ed15c130760c6aacfa720d102d7
Author: Louis Knight-Webb
Date:   Tue Jun 24 12:04:38 2025 +0100

    Lint

commit 0a0ad901773e14f634ded8a68a108efc2fbca0ae
Author: Louis Knight-Webb
Date:   Tue Jun 24 12:01:37 2025 +0100

    WIP dev server
---
 ...ddfa8d5c9d42546fe36116a61f4db94611c3.json} |  14 +-
 ...521bae3479e3d2602e724d2c93e6fc85d8ee.json} |  14 +-
 ...6e1a03de71cc6c8adc62ef4873b79449f078.json} |  14 +-
 ...40407abce436cb81292d42b2dbe1e5c18eea1.json | 104 +++++
 ...b65557f770894e21e654303047d4150cca93.json} |  16 +-
 ...509409e3763fa6e6c8a905c5f9536b2c9a92.json} |  14 +-
 ...d91299a339dc2b191462fc58c9736a56d5227.json |  92 ++++
 ...c99f6014c4d9c0f965ff571ec75945132285.json} |  16 +-
 ...50625000000_add_dev_script_to_projects.sql |   4 +
 backend/src/bin/generate_types.rs             |   1 +
 backend/src/execution_monitor.rs              | 162 ++++---
 backend/src/executor.rs                       |   1 +
 backend/src/executors/dev_server.rs           |  44 ++
 backend/src/executors/mod.rs                  |   2 +
 backend/src/executors/setup_script.rs         |   1 +
 backend/src/main.rs                           |  43 +-
 backend/src/models/execution_process.rs       |  84 ++++
 backend/src/models/project.rs                 |  22 +-
 backend/src/models/task_attempt.rs            |  71 ++-
 backend/src/routes/config.rs                  |   5 +-
 backend/src/routes/projects.rs                |   3 +-
 backend/src/routes/task_attempts.rs           | 186 ++++++--
 .../src/components/projects/project-form.tsx  |  23 +
 .../tasks/ExecutionOutputViewer.tsx           |  42 +-
 .../src/components/tasks/TaskDetailsPanel.tsx | 419 ++++++++++++++----
 frontend/src/pages/project-tasks.tsx          |   1 +
 shared/types.ts                               |   8 +-
 27 files changed, 1144 insertions(+), 262 deletions(-)
 rename backend/.sqlx/{query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json => query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json} (72%)
 rename backend/.sqlx/{query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json => query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json} (72%)
 rename backend/.sqlx/{query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json => query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json} (71%)
 create mode 100644 backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json
 rename backend/.sqlx/{query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json => query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json} (67%)
 rename backend/.sqlx/{query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json => query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json} (72%)
 create mode 100644 backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json
 rename 
backend/.sqlx/{query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json => query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json} (67%) create mode 100644 backend/migrations/20250625000000_add_dev_script_to_projects.sql create mode 100644 backend/src/executors/dev_server.rs diff --git a/backend/.sqlx/query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json b/backend/.sqlx/query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json similarity index 72% rename from backend/.sqlx/query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json rename to backend/.sqlx/query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json index 7734ad6e..8eda52c8 100644 --- a/backend/.sqlx/query-b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0.json +++ b/backend/.sqlx/query-056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1", + "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1", "describe": { "columns": [ { @@ -24,14 +24,19 @@ "type_info": "Text" }, { - "name": "created_at!: DateTime", + "name": "dev_script", "ordinal": 4, "type_info": "Text" }, { - "name": "updated_at!: DateTime", + "name": "created_at!: DateTime", "ordinal": 5, "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 6, + "type_info": "Text" } ], "parameters": { @@ -42,9 +47,10 @@ false, false, true, + true, false, false ] }, - "hash": "b62fa26fe7cdbee672504dbf63d3dbe19fca02a4a4f97d7df7143f340540efa0" + "hash": "056991f6ec992103f9de72475138ddfa8d5c9d42546fe36116a61f4db94611c3" } diff --git a/backend/.sqlx/query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json b/backend/.sqlx/query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json similarity index 72% rename from backend/.sqlx/query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json rename to backend/.sqlx/query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json index 114ed94b..1c096326 100644 --- a/backend/.sqlx/query-420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf.json +++ b/backend/.sqlx/query-08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects ORDER BY created_at DESC", + "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects ORDER BY created_at DESC", "describe": { "columns": [ { @@ -24,14 +24,19 @@ "type_info": "Text" }, { - "name": "created_at!: DateTime", + "name": "dev_script", "ordinal": 4, "type_info": "Text" }, { - "name": "updated_at!: DateTime", + "name": "created_at!: DateTime", "ordinal": 5, "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 6, + "type_info": "Text" } ], "parameters": { @@ -42,9 +47,10 @@ false, false, true, + true, false, false ] }, - "hash": 
"420c9eec0dd98062947b090bc695b67c2bcaba9862c06b701a9ba3d8a5b02abf" + "hash": "08f2cb03665a16640d6690f29920521bae3479e3d2602e724d2c93e6fc85d8ee" } diff --git a/backend/.sqlx/query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json b/backend/.sqlx/query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json similarity index 71% rename from backend/.sqlx/query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json rename to backend/.sqlx/query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json index 78393305..42b33397 100644 --- a/backend/.sqlx/query-205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff.json +++ b/backend/.sqlx/query-1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1 AND id != $2", + "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE git_repo_path = $1 AND id != $2", "describe": { "columns": [ { @@ -24,14 +24,19 @@ "type_info": "Text" }, { - "name": "created_at!: DateTime", + "name": "dev_script", "ordinal": 4, "type_info": "Text" }, { - "name": "updated_at!: DateTime", + "name": "created_at!: DateTime", "ordinal": 5, "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 6, + "type_info": "Text" } ], "parameters": { @@ -42,9 +47,10 @@ false, false, true, + true, false, false ] }, - "hash": "205da45211b3aa413684ecd76d065fc59f793da42da075246464ac776016f5ff" + "hash": "1f3dd0f80e984a8472457be40cd96e1a03de71cc6c8adc62ef4873b79449f078" } diff --git a/backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json b/backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json new file mode 100644 index 00000000..7a6a9594 --- /dev/null +++ b/backend/.sqlx/query-412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1.json @@ -0,0 +1,104 @@ +{ + "db_name": "SQLite", + "query": "SELECT \n ep.id as \"id!: Uuid\", \n ep.task_attempt_id as \"task_attempt_id!: Uuid\", \n ep.process_type as \"process_type!: ExecutionProcessType\",\n ep.executor_type,\n ep.status as \"status!: ExecutionProcessStatus\",\n ep.command, \n ep.args, \n ep.working_directory, \n ep.stdout, \n ep.stderr, \n ep.exit_code,\n ep.started_at as \"started_at!: DateTime\",\n ep.completed_at as \"completed_at?: DateTime\",\n ep.created_at as \"created_at!: DateTime\", \n ep.updated_at as \"updated_at!: DateTime\"\n FROM execution_processes ep\n JOIN task_attempts ta ON ep.task_attempt_id = ta.id\n JOIN tasks t ON ta.task_id = t.id\n WHERE ep.status = 'running' \n AND ep.process_type = 'devserver'\n AND t.project_id = $1\n ORDER BY ep.created_at ASC", + "describe": { + "columns": [ + { + "name": "id!: Uuid", + "ordinal": 0, + "type_info": "Blob" + }, + { + "name": "task_attempt_id!: Uuid", + "ordinal": 1, + "type_info": "Blob" + }, + { + "name": "process_type!: ExecutionProcessType", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "executor_type", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "status!: ExecutionProcessStatus", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "command", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "args", + 
"ordinal": 6, + "type_info": "Text" + }, + { + "name": "working_directory", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "stdout", + "ordinal": 8, + "type_info": "Text" + }, + { + "name": "stderr", + "ordinal": 9, + "type_info": "Text" + }, + { + "name": "exit_code", + "ordinal": 10, + "type_info": "Integer" + }, + { + "name": "started_at!: DateTime", + "ordinal": 11, + "type_info": "Text" + }, + { + "name": "completed_at?: DateTime", + "ordinal": 12, + "type_info": "Text" + }, + { + "name": "created_at!: DateTime", + "ordinal": 13, + "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 14, + "type_info": "Text" + } + ], + "parameters": { + "Right": 1 + }, + "nullable": [ + true, + false, + false, + true, + false, + false, + true, + false, + true, + true, + true, + false, + true, + false, + false + ] + }, + "hash": "412bacd3477d86369082e90f52240407abce436cb81292d42b2dbe1e5c18eea1" +} diff --git a/backend/.sqlx/query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json b/backend/.sqlx/query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json similarity index 67% rename from backend/.sqlx/query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json rename to backend/.sqlx/query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json index 3de82da1..e3011e96 100644 --- a/backend/.sqlx/query-b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc.json +++ b/backend/.sqlx/query-42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "UPDATE projects SET name = $2, git_repo_path = $3, setup_script = $4 WHERE id = $1 RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"", + "query": "UPDATE projects SET name = $2, git_repo_path = $3, setup_script = $4, dev_script = $5 WHERE id = $1 RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"", "describe": { "columns": [ { @@ -24,27 +24,33 @@ "type_info": "Text" }, { - "name": "created_at!: DateTime", + "name": "dev_script", "ordinal": 4, "type_info": "Text" }, { - "name": "updated_at!: DateTime", + "name": "created_at!: DateTime", "ordinal": 5, "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 6, + "type_info": "Text" } ], "parameters": { - "Right": 4 + "Right": 5 }, "nullable": [ true, false, false, true, + true, false, false ] }, - "hash": "b3bead952fd42b79bed0908db603726935c0e830ea74ff30064bac71185442fc" + "hash": "42c0c81bb893af019b5b91b48c3cb65557f770894e21e654303047d4150cca93" } diff --git a/backend/.sqlx/query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json b/backend/.sqlx/query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json similarity index 72% rename from backend/.sqlx/query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json rename to backend/.sqlx/query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json index 0ee5c3f0..a54de44d 100644 --- a/backend/.sqlx/query-346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07.json +++ b/backend/.sqlx/query-4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: 
DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE id = $1", + "query": "SELECT id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\" FROM projects WHERE id = $1", "describe": { "columns": [ { @@ -24,14 +24,19 @@ "type_info": "Text" }, { - "name": "created_at!: DateTime", + "name": "dev_script", "ordinal": 4, "type_info": "Text" }, { - "name": "updated_at!: DateTime", + "name": "created_at!: DateTime", "ordinal": 5, "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 6, + "type_info": "Text" } ], "parameters": { @@ -42,9 +47,10 @@ false, false, true, + true, false, false ] }, - "hash": "346d58b8e0628d6a5936675beadc0a43ffa2dca384ed4f4b3a3abfcd09592c07" + "hash": "4fd26525fb4e2f606200695e1b62509409e3763fa6e6c8a905c5f9536b2c9a92" } diff --git a/backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json b/backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json new file mode 100644 index 00000000..4214e7d9 --- /dev/null +++ b/backend/.sqlx/query-58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227.json @@ -0,0 +1,92 @@ +{ + "db_name": "SQLite", + "query": "SELECT \n id as \"id!: Uuid\", \n task_attempt_id as \"task_attempt_id!: Uuid\", \n process_type as \"process_type!: ExecutionProcessType\",\n executor_type,\n status as \"status!: ExecutionProcessStatus\",\n command, \n args, \n working_directory, \n exit_code,\n started_at as \"started_at!: DateTime\",\n completed_at as \"completed_at?: DateTime\",\n created_at as \"created_at!: DateTime\", \n updated_at as \"updated_at!: DateTime\"\n FROM execution_processes \n WHERE task_attempt_id = $1 \n ORDER BY created_at ASC", + "describe": { + "columns": [ + { + "name": "id!: Uuid", + "ordinal": 0, + "type_info": "Blob" + }, + { + "name": "task_attempt_id!: Uuid", + "ordinal": 1, + "type_info": "Blob" + }, + { + "name": "process_type!: ExecutionProcessType", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "executor_type", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "status!: ExecutionProcessStatus", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "command", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "args", + "ordinal": 6, + "type_info": "Text" + }, + { + "name": "working_directory", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "exit_code", + "ordinal": 8, + "type_info": "Integer" + }, + { + "name": "started_at!: DateTime", + "ordinal": 9, + "type_info": "Text" + }, + { + "name": "completed_at?: DateTime", + "ordinal": 10, + "type_info": "Text" + }, + { + "name": "created_at!: DateTime", + "ordinal": 11, + "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 12, + "type_info": "Text" + } + ], + "parameters": { + "Right": 1 + }, + "nullable": [ + true, + false, + false, + true, + false, + false, + true, + false, + true, + false, + true, + false, + false + ] + }, + "hash": "58408c7a8cdeeda0bef359f1f9bd91299a339dc2b191462fc58c9736a56d5227" +} diff --git a/backend/.sqlx/query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json b/backend/.sqlx/query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json similarity index 67% rename from backend/.sqlx/query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json rename to backend/.sqlx/query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json index 
faf32688..5943a6bc 100644 --- a/backend/.sqlx/query-64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864.json +++ b/backend/.sqlx/query-5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "INSERT INTO projects (id, name, git_repo_path, setup_script) VALUES ($1, $2, $3, $4) RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"", + "query": "INSERT INTO projects (id, name, git_repo_path, setup_script, dev_script) VALUES ($1, $2, $3, $4, $5) RETURNING id as \"id!: Uuid\", name, git_repo_path, setup_script, dev_script, created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"", "describe": { "columns": [ { @@ -24,27 +24,33 @@ "type_info": "Text" }, { - "name": "created_at!: DateTime", + "name": "dev_script", "ordinal": 4, "type_info": "Text" }, { - "name": "updated_at!: DateTime", + "name": "created_at!: DateTime", "ordinal": 5, "type_info": "Text" + }, + { + "name": "updated_at!: DateTime", + "ordinal": 6, + "type_info": "Text" } ], "parameters": { - "Right": 4 + "Right": 5 }, "nullable": [ true, false, false, true, + true, false, false ] }, - "hash": "64fd750d2f767096f94b28650018dc657ad41c6a0af908215f694100319b4864" + "hash": "5dc5d9e57b9dee5421b414f385a4c99f6014c4d9c0f965ff571ec75945132285" } diff --git a/backend/migrations/20250625000000_add_dev_script_to_projects.sql b/backend/migrations/20250625000000_add_dev_script_to_projects.sql new file mode 100644 index 00000000..d2c95d01 --- /dev/null +++ b/backend/migrations/20250625000000_add_dev_script_to_projects.sql @@ -0,0 +1,4 @@ +PRAGMA foreign_keys = ON; + +-- Add dev_script column to projects table +ALTER TABLE projects ADD COLUMN dev_script TEXT DEFAULT ''; diff --git a/backend/src/bin/generate_types.rs b/backend/src/bin/generate_types.rs index a8ac3c93..ddbddf59 100644 --- a/backend/src/bin/generate_types.rs +++ b/backend/src/bin/generate_types.rs @@ -103,6 +103,7 @@ fn main() { vibe_kanban::models::task_attempt::WorktreeDiff::decl(), vibe_kanban::models::task_attempt::BranchStatus::decl(), vibe_kanban::models::execution_process::ExecutionProcess::decl(), + vibe_kanban::models::execution_process::ExecutionProcessSummary::decl(), vibe_kanban::models::execution_process::ExecutionProcessStatus::decl(), vibe_kanban::models::execution_process::ExecutionProcessType::decl(), vibe_kanban::models::execution_process::CreateExecutionProcess::decl(), diff --git a/backend/src/execution_monitor.rs b/backend/src/execution_monitor.rs index 0321caf7..f52268e7 100644 --- a/backend/src/execution_monitor.rs +++ b/backend/src/execution_monitor.rs @@ -218,84 +218,100 @@ pub async fn execution_monitor(app_state: AppState) { } } - // Check for orphaned task attempts AFTER handling completions + // Check for orphaned execution processes AFTER handling completions // Add a small delay to ensure completed processes are properly handled first tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; - let running_process_ids = - match TaskAttemptActivity::find_processes_with_latest_running_status(&app_state.db_pool) + let running_processes = match ExecutionProcess::find_running(&app_state.db_pool).await { + Ok(processes) => processes, + Err(e) => { + tracing::error!("Failed to query running execution processes: {}", e); + continue; + } + }; + + for process in running_processes { + // Additional check: if the process was recently updated, skip it + // This 
prevents race conditions with recent completions + let now = chrono::Utc::now(); + let time_since_update = now - process.updated_at; + if time_since_update.num_seconds() < 10 { + // Process was updated within last 10 seconds, likely just completed + tracing::debug!( + "Skipping recently updated process {} (updated {} seconds ago)", + process.id, + time_since_update.num_seconds() + ); + continue; + } + + // Check if this process is not actually running in the app state + if !app_state + .has_running_execution(process.task_attempt_id) .await { - Ok(processes) => processes, - Err(e) => { - tracing::error!("Failed to query running attempts: {}", e); - continue; - } - }; + // This is truly an orphaned execution process - mark it as failed + tracing::info!( + "Found orphaned execution process {} for task attempt {}", + process.id, + process.task_attempt_id + ); - for process_id in running_process_ids { - // Get the execution process to find the task attempt ID - let task_attempt_id = - match ExecutionProcess::find_by_id(&app_state.db_pool, process_id).await { - Ok(Some(process)) => { - // Additional check: if the process was recently updated, skip it - // This prevents race conditions with recent completions - let now = chrono::Utc::now(); - let time_since_update = now - process.updated_at; - if time_since_update.num_seconds() < 10 { - // Process was updated within last 10 seconds, likely just completed - tracing::debug!( - "Skipping recently updated process {} (updated {} seconds ago)", - process_id, - time_since_update.num_seconds() - ); - continue; - } - process.task_attempt_id - } - Ok(None) => { - tracing::error!("Execution process {} not found", process_id); - continue; - } - Err(e) => { - tracing::error!("Failed to fetch execution process {}: {}", process_id, e); - continue; - } - }; - - // Double-check that this task attempt is not currently running and hasn't just completed - if !app_state.has_running_execution(task_attempt_id).await { - // This is truly an orphaned task attempt - mark it as failed - let activity_id = Uuid::new_v4(); - let create_activity = CreateTaskAttemptActivity { - execution_process_id: process_id, - status: Some(TaskAttemptStatus::ExecutorFailed), - note: Some("Execution lost (server restart or crash)".to_string()), - }; - - if let Err(e) = TaskAttemptActivity::create( + // Update the execution process status first + if let Err(e) = ExecutionProcess::update_completion( &app_state.db_pool, - &create_activity, - activity_id, - TaskAttemptStatus::ExecutorFailed, + process.id, + ExecutionProcessStatus::Failed, + None, // No exit code for orphaned processes ) .await { tracing::error!( - "Failed to create failed activity for orphaned process: {}", + "Failed to update orphaned execution process {} status: {}", + process.id, e ); - } else { - tracing::info!("Marked orphaned execution process {} as failed", process_id); + continue; + } - // Get task attempt and task to access task_id and project_id for status update + // Create task attempt activity for non-dev server processes + if process.process_type != ExecutionProcessType::DevServer { + let activity_id = Uuid::new_v4(); + let create_activity = CreateTaskAttemptActivity { + execution_process_id: process.id, + status: Some(TaskAttemptStatus::ExecutorFailed), + note: Some("Execution lost (server restart or crash)".to_string()), + }; + + if let Err(e) = TaskAttemptActivity::create( + &app_state.db_pool, + &create_activity, + activity_id, + TaskAttemptStatus::ExecutorFailed, + ) + .await + { + tracing::error!( + "Failed to 
create failed activity for orphaned process: {}", + e + ); + continue; + } + } + + tracing::info!("Marked orphaned execution process {} as failed", process.id); + + // Update task status to InReview for coding agent and setup script failures + if matches!( + process.process_type, + ExecutionProcessType::CodingAgent | ExecutionProcessType::SetupScript + ) { if let Ok(Some(task_attempt)) = - TaskAttempt::find_by_id(&app_state.db_pool, task_attempt_id).await + TaskAttempt::find_by_id(&app_state.db_pool, process.task_attempt_id).await { if let Ok(Some(task)) = Task::find_by_id(&app_state.db_pool, task_attempt.task_id).await { - // Update task status to InReview if let Err(e) = Task::update_status( &app_state.db_pool, task.id, @@ -518,11 +534,11 @@ async fn handle_coding_agent_completion( /// Handle dev server completion (future functionality) async fn handle_dev_server_completion( - _app_state: &AppState, + app_state: &AppState, task_attempt_id: Uuid, - _execution_process_id: Uuid, + execution_process_id: Uuid, _execution_process: ExecutionProcess, - _success: bool, + success: bool, exit_code: Option, ) { let exit_text = if let Some(code) = exit_code { @@ -537,6 +553,24 @@ async fn handle_dev_server_completion( exit_text ); - // Dev servers might restart automatically or have different completion semantics - // For now, just log the completion + // Update execution process status instead of creating activity + let process_status = if success { + ExecutionProcessStatus::Completed + } else { + ExecutionProcessStatus::Failed + }; + + if let Err(e) = ExecutionProcess::update_completion( + &app_state.db_pool, + execution_process_id, + process_status, + exit_code, + ) + .await + { + tracing::error!( + "Failed to update dev server execution process status: {}", + e + ); + } } diff --git a/backend/src/executor.rs b/backend/src/executor.rs index e2828f9f..57504320 100644 --- a/backend/src/executor.rs +++ b/backend/src/executor.rs @@ -91,6 +91,7 @@ pub trait Executor: Send + Sync { #[derive(Debug, Clone)] pub enum ExecutorType { SetupScript(String), + DevServer(String), CodingAgent(ExecutorConfig), FollowUpCodingAgent { config: ExecutorConfig, diff --git a/backend/src/executors/dev_server.rs b/backend/src/executors/dev_server.rs new file mode 100644 index 00000000..803a7ba3 --- /dev/null +++ b/backend/src/executors/dev_server.rs @@ -0,0 +1,44 @@ +use async_trait::async_trait; +use tokio::process::{Child, Command}; +use uuid::Uuid; + +use crate::executor::{Executor, ExecutorError}; +use crate::models::project::Project; +use crate::models::task::Task; + +/// Executor for running project dev server scripts +pub struct DevServerExecutor { + pub script: String, +} + +#[async_trait] +impl Executor for DevServerExecutor { + async fn spawn( + &self, + pool: &sqlx::SqlitePool, + task_id: Uuid, + worktree_path: &str, + ) -> Result { + // Validate the task and project exist + let task = Task::find_by_id(pool, task_id) + .await? + .ok_or(ExecutorError::TaskNotFound)?; + + let _project = Project::find_by_id(pool, task.project_id) + .await? 
+ .ok_or(ExecutorError::TaskNotFound)?; // Reuse TaskNotFound for simplicity + + let child = Command::new("bash") + .kill_on_drop(true) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .arg("-c") + .arg(&self.script) + .current_dir(worktree_path) + .process_group(0) + .spawn() + .map_err(ExecutorError::SpawnFailed)?; + + Ok(child) + } +} diff --git a/backend/src/executors/mod.rs b/backend/src/executors/mod.rs index 5367400f..2cc76479 100644 --- a/backend/src/executors/mod.rs +++ b/backend/src/executors/mod.rs @@ -1,9 +1,11 @@ pub mod amp; pub mod claude; +pub mod dev_server; pub mod echo; pub mod setup_script; pub use amp::{AmpExecutor, AmpFollowupExecutor}; pub use claude::{ClaudeExecutor, ClaudeFollowupExecutor}; +pub use dev_server::DevServerExecutor; pub use echo::EchoExecutor; pub use setup_script::SetupScriptExecutor; diff --git a/backend/src/executors/setup_script.rs b/backend/src/executors/setup_script.rs index 7f854352..dc2812d7 100644 --- a/backend/src/executors/setup_script.rs +++ b/backend/src/executors/setup_script.rs @@ -35,6 +35,7 @@ impl Executor for SetupScriptExecutor { .arg("-c") .arg(&self.script) .current_dir(worktree_path) + .process_group(0) .spawn() .map_err(ExecutorError::SpawnFailed)?; diff --git a/backend/src/main.rs b/backend/src/main.rs index 17a9f2bf..7a9c2376 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -83,14 +83,23 @@ async fn serve_file(path: &str) -> impl IntoResponse { } } -async fn serve_sound_file(axum::extract::Path(filename): axum::extract::Path) -> impl IntoResponse { - use tokio::fs; +async fn serve_sound_file( + axum::extract::Path(filename): axum::extract::Path, +) -> impl IntoResponse { use std::path::Path; + use tokio::fs; // Validate filename contains only expected sound files - let valid_sounds = ["abstract-sound1.mp3", "abstract-sound2.mp3", "abstract-sound3.mp3", - "abstract-sound4.mp3", "cow-mooing.mp3", "phone-vibration.mp3", "rooster.mp3"]; - + let valid_sounds = [ + "abstract-sound1.mp3", + "abstract-sound2.mp3", + "abstract-sound3.mp3", + "abstract-sound4.mp3", + "cow-mooing.mp3", + "phone-vibration.mp3", + "rooster.mp3", + ]; + if !valid_sounds.contains(&filename.as_str()) { return Response::builder() .status(StatusCode::NOT_FOUND) @@ -99,21 +108,17 @@ async fn serve_sound_file(axum::extract::Path(filename): axum::extract::Path { - Response::builder() - .status(StatusCode::OK) - .header(header::CONTENT_TYPE, HeaderValue::from_static("audio/mpeg")) - .body(Body::from(content)) - .unwrap() - } - Err(_) => { - Response::builder() - .status(StatusCode::NOT_FOUND) - .body(Body::from("Sound file not found")) - .unwrap() - } + Ok(content) => Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, HeaderValue::from_static("audio/mpeg")) + .body(Body::from(content)) + .unwrap(), + Err(_) => Response::builder() + .status(StatusCode::NOT_FOUND) + .body(Body::from("Sound file not found")) + .unwrap(), } } diff --git a/backend/src/models/execution_process.rs b/backend/src/models/execution_process.rs index 508d8c39..95a1a935 100644 --- a/backend/src/models/execution_process.rs +++ b/backend/src/models/execution_process.rs @@ -86,6 +86,24 @@ pub struct UpdateExecutionProcess { pub completed_at: Option>, } +#[derive(Debug, Clone, FromRow, Serialize, Deserialize, TS)] +#[ts(export)] +pub struct ExecutionProcessSummary { + pub id: Uuid, + pub task_attempt_id: Uuid, + pub process_type: ExecutionProcessType, + pub executor_type: Option, // "echo", "claude", "amp", etc. 
- only for CodingAgent processes + pub status: ExecutionProcessStatus, + pub command: String, + pub args: Option, // JSON array of arguments + pub working_directory: String, + pub exit_code: Option, + pub started_at: DateTime, + pub completed_at: Option>, + pub created_at: DateTime, + pub updated_at: DateTime, +} + impl ExecutionProcess { /// Find execution process by ID pub async fn find_by_id(pool: &SqlitePool, id: Uuid) -> Result, sqlx::Error> { @@ -147,6 +165,36 @@ impl ExecutionProcess { .await } + /// Find execution process summaries for a task attempt (excluding stdio) + pub async fn find_summaries_by_task_attempt_id( + pool: &SqlitePool, + task_attempt_id: Uuid, + ) -> Result, sqlx::Error> { + sqlx::query_as!( + ExecutionProcessSummary, + r#"SELECT + id as "id!: Uuid", + task_attempt_id as "task_attempt_id!: Uuid", + process_type as "process_type!: ExecutionProcessType", + executor_type, + status as "status!: ExecutionProcessStatus", + command, + args, + working_directory, + exit_code, + started_at as "started_at!: DateTime", + completed_at as "completed_at?: DateTime", + created_at as "created_at!: DateTime", + updated_at as "updated_at!: DateTime" + FROM execution_processes + WHERE task_attempt_id = $1 + ORDER BY created_at ASC"#, + task_attempt_id + ) + .fetch_all(pool) + .await + } + /// Find running execution processes pub async fn find_running(pool: &SqlitePool) -> Result, sqlx::Error> { sqlx::query_as!( @@ -175,6 +223,42 @@ impl ExecutionProcess { .await } + /// Find running dev servers for a specific project + pub async fn find_running_dev_servers_by_project( + pool: &SqlitePool, + project_id: Uuid, + ) -> Result, sqlx::Error> { + sqlx::query_as!( + ExecutionProcess, + r#"SELECT + ep.id as "id!: Uuid", + ep.task_attempt_id as "task_attempt_id!: Uuid", + ep.process_type as "process_type!: ExecutionProcessType", + ep.executor_type, + ep.status as "status!: ExecutionProcessStatus", + ep.command, + ep.args, + ep.working_directory, + ep.stdout, + ep.stderr, + ep.exit_code, + ep.started_at as "started_at!: DateTime", + ep.completed_at as "completed_at?: DateTime", + ep.created_at as "created_at!: DateTime", + ep.updated_at as "updated_at!: DateTime" + FROM execution_processes ep + JOIN task_attempts ta ON ep.task_attempt_id = ta.id + JOIN tasks t ON ta.task_id = t.id + WHERE ep.status = 'running' + AND ep.process_type = 'devserver' + AND t.project_id = $1 + ORDER BY ep.created_at ASC"#, + project_id + ) + .fetch_all(pool) + .await + } + /// Create a new execution process pub async fn create( pool: &SqlitePool, diff --git a/backend/src/models/project.rs b/backend/src/models/project.rs index b3173ee0..d81073f3 100644 --- a/backend/src/models/project.rs +++ b/backend/src/models/project.rs @@ -11,6 +11,7 @@ pub struct Project { pub name: String, pub git_repo_path: String, pub setup_script: Option, + pub dev_script: Option, #[ts(type = "Date")] pub created_at: DateTime, @@ -25,6 +26,7 @@ pub struct CreateProject { pub git_repo_path: String, pub use_existing_repo: bool, pub setup_script: Option, + pub dev_script: Option, } #[derive(Debug, Deserialize, TS)] @@ -33,6 +35,7 @@ pub struct UpdateProject { pub name: Option, pub git_repo_path: Option, pub setup_script: Option, + pub dev_script: Option, } #[derive(Debug, Serialize, TS)] @@ -55,7 +58,7 @@ impl Project { pub async fn find_all(pool: &SqlitePool) -> Result, sqlx::Error> { sqlx::query_as!( Project, - r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: 
DateTime" FROM projects ORDER BY created_at DESC"# + r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, dev_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects ORDER BY created_at DESC"# ) .fetch_all(pool) .await @@ -64,7 +67,7 @@ impl Project { pub async fn find_by_id(pool: &SqlitePool, id: Uuid) -> Result, sqlx::Error> { sqlx::query_as!( Project, - r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects WHERE id = $1"#, + r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, dev_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects WHERE id = $1"#, id ) .fetch_optional(pool) @@ -77,7 +80,7 @@ impl Project { ) -> Result, sqlx::Error> { sqlx::query_as!( Project, - r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects WHERE git_repo_path = $1"#, + r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, dev_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects WHERE git_repo_path = $1"#, git_repo_path ) .fetch_optional(pool) @@ -91,7 +94,7 @@ impl Project { ) -> Result, sqlx::Error> { sqlx::query_as!( Project, - r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects WHERE git_repo_path = $1 AND id != $2"#, + r#"SELECT id as "id!: Uuid", name, git_repo_path, setup_script, dev_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime" FROM projects WHERE git_repo_path = $1 AND id != $2"#, git_repo_path, exclude_id ) @@ -106,11 +109,12 @@ impl Project { ) -> Result { sqlx::query_as!( Project, - r#"INSERT INTO projects (id, name, git_repo_path, setup_script) VALUES ($1, $2, $3, $4) RETURNING id as "id!: Uuid", name, git_repo_path, setup_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime""#, + r#"INSERT INTO projects (id, name, git_repo_path, setup_script, dev_script) VALUES ($1, $2, $3, $4, $5) RETURNING id as "id!: Uuid", name, git_repo_path, setup_script, dev_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime""#, project_id, data.name, data.git_repo_path, - data.setup_script + data.setup_script, + data.dev_script ) .fetch_one(pool) .await @@ -122,14 +126,16 @@ impl Project { name: String, git_repo_path: String, setup_script: Option, + dev_script: Option, ) -> Result { sqlx::query_as!( Project, - r#"UPDATE projects SET name = $2, git_repo_path = $3, setup_script = $4 WHERE id = $1 RETURNING id as "id!: Uuid", name, git_repo_path, setup_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime""#, + r#"UPDATE projects SET name = $2, git_repo_path = $3, setup_script = $4, dev_script = $5 WHERE id = $1 RETURNING id as "id!: Uuid", name, git_repo_path, setup_script, dev_script, created_at as "created_at!: DateTime", updated_at as "updated_at!: DateTime""#, id, name, git_repo_path, - setup_script + setup_script, + dev_script ) .fetch_one(pool) .await diff --git a/backend/src/models/task_attempt.rs b/backend/src/models/task_attempt.rs index 86e96436..af267041 100644 --- a/backend/src/models/task_attempt.rs +++ b/backend/src/models/task_attempt.rs @@ -18,6 +18,7 @@ pub enum TaskAttemptError { Git(GitError), TaskNotFound, 
ProjectNotFound, + ValidationError(String), } impl std::fmt::Display for TaskAttemptError { @@ -27,6 +28,7 @@ impl std::fmt::Display for TaskAttemptError { TaskAttemptError::Git(e) => write!(f, "Git error: {}", e), TaskAttemptError::TaskNotFound => write!(f, "Task not found"), TaskAttemptError::ProjectNotFound => write!(f, "Project not found"), + TaskAttemptError::ValidationError(e) => write!(f, "Validation error: {}", e), } } } @@ -486,6 +488,49 @@ impl TaskAttempt { .await } + /// Start a dev server for this task attempt + pub async fn start_dev_server( + pool: &SqlitePool, + app_state: &crate::app_state::AppState, + attempt_id: Uuid, + task_id: Uuid, + project_id: Uuid, + ) -> Result<(), TaskAttemptError> { + let task_attempt = TaskAttempt::find_by_id(pool, attempt_id) + .await? + .ok_or(TaskAttemptError::TaskNotFound)?; + + // Get the project to access the dev_script + let project = crate::models::project::Project::find_by_id(pool, project_id) + .await? + .ok_or(TaskAttemptError::TaskNotFound)?; + + let dev_script = project.dev_script.ok_or_else(|| { + TaskAttemptError::ValidationError( + "No dev script configured for this project".to_string(), + ) + })?; + + if dev_script.trim().is_empty() { + return Err(TaskAttemptError::ValidationError( + "Dev script is empty".to_string(), + )); + } + + Self::start_process_execution( + pool, + app_state, + attempt_id, + task_id, + crate::executor::ExecutorType::DevServer(dev_script), + "Starting dev server".to_string(), + TaskAttemptStatus::ExecutorRunning, // Dev servers don't create activities, just use generic status + crate::models::execution_process::ExecutionProcessType::DevServer, + &task_attempt.worktree_path, + ) + .await + } + /// Start a follow-up execution using the same executor type as the first process pub async fn start_followup_execution( pool: &SqlitePool, @@ -600,9 +645,14 @@ impl TaskAttempt { Self::create_executor_session_record(pool, attempt_id, task_id, process_id).await?; } - // Create activity record - Self::create_activity_record(pool, process_id, activity_status.clone(), &activity_note) - .await?; + // Create activity record (skip for dev servers as they run in parallel) + if !matches!( + process_type, + crate::models::execution_process::ExecutionProcessType::DevServer + ) { + Self::create_activity_record(pool, process_id, activity_status.clone(), &activity_note) + .await?; + } tracing::info!("Starting {} for task attempt {}", activity_note, attempt_id); @@ -646,6 +696,11 @@ impl TaskAttempt { Some(serde_json::to_string(&["-c", "setup_script"]).unwrap()), None, // Setup scripts don't have an executor type ), + crate::executor::ExecutorType::DevServer(_) => ( + "bash".to_string(), + Some(serde_json::to_string(&["-c", "dev_server"]).unwrap()), + None, // Dev servers don't have an executor type + ), crate::executor::ExecutorType::CodingAgent(config) => { let executor_type_str = match config { crate::executor::ExecutorConfig::Echo => "echo", @@ -748,7 +803,7 @@ impl TaskAttempt { process_id: Uuid, worktree_path: &str, ) -> Result { - use crate::executors::SetupScriptExecutor; + use crate::executors::{DevServerExecutor, SetupScriptExecutor}; let result = match executor_type { crate::executor::ExecutorType::SetupScript(script) => { @@ -759,6 +814,14 @@ impl TaskAttempt { .execute_streaming(pool, task_id, attempt_id, process_id, worktree_path) .await } + crate::executor::ExecutorType::DevServer(script) => { + let executor = DevServerExecutor { + script: script.clone(), + }; + executor + .execute_streaming(pool, task_id, 
attempt_id, process_id, worktree_path) + .await + } crate::executor::ExecutorType::CodingAgent(config) => { let executor = config.create_executor(); executor diff --git a/backend/src/routes/config.rs b/backend/src/routes/config.rs index e891ca49..e7e3dbe4 100644 --- a/backend/src/routes/config.rs +++ b/backend/src/routes/config.rs @@ -7,7 +7,10 @@ use axum::{ use std::sync::Arc; use tokio::sync::RwLock; -use crate::models::{config::{Config, EditorConstants, SoundConstants}, ApiResponse}; +use crate::models::{ + config::{Config, EditorConstants, SoundConstants}, + ApiResponse, +}; use crate::utils; use serde::{Deserialize, Serialize}; use ts_rs::TS; diff --git a/backend/src/routes/projects.rs b/backend/src/routes/projects.rs index 90c959bf..446b87ca 100644 --- a/backend/src/routes/projects.rs +++ b/backend/src/routes/projects.rs @@ -205,8 +205,9 @@ pub async fn update_project( .git_repo_path .unwrap_or(existing_project.git_repo_path.clone()); let setup_script = payload.setup_script.or(existing_project.setup_script); + let dev_script = payload.dev_script.or(existing_project.dev_script); - match Project::update(&pool, id, name, git_repo_path, setup_script).await { + match Project::update(&pool, id, name, git_repo_path, setup_script, dev_script).await { Ok(project) => Ok(ResponseJson(ApiResponse { success: true, data: Some(project), diff --git a/backend/src/routes/task_attempts.rs b/backend/src/routes/task_attempts.rs index 5d6891f7..22c6df20 100644 --- a/backend/src/routes/task_attempts.rs +++ b/backend/src/routes/task_attempts.rs @@ -11,7 +11,7 @@ use tokio::sync::RwLock; use uuid::Uuid; use crate::models::{ - execution_process::ExecutionProcess, + execution_process::{ExecutionProcess, ExecutionProcessSummary}, task::Task, task_attempt::{ BranchStatus, CreateFollowUpAttempt, CreateTaskAttempt, TaskAttempt, TaskAttemptStatus, @@ -431,7 +431,7 @@ pub async fn rebase_task_attempt( pub async fn get_task_attempt_execution_processes( Path((project_id, task_id, attempt_id)): Path<(Uuid, Uuid, Uuid)>, Extension(pool): Extension, -) -> Result>>, StatusCode> { +) -> Result>>, StatusCode> { // Verify task attempt exists and belongs to the correct task match TaskAttempt::exists_for_task(&pool, attempt_id, task_id, project_id).await { Ok(false) => return Err(StatusCode::NOT_FOUND), @@ -442,7 +442,7 @@ pub async fn get_task_attempt_execution_processes( Ok(true) => {} } - match ExecutionProcess::find_by_task_attempt_id(&pool, attempt_id).await { + match ExecutionProcess::find_summaries_by_task_attempt_id(&pool, attempt_id).await { Ok(processes) => Ok(ResponseJson(ApiResponse { success: true, data: Some(processes), @@ -549,30 +549,35 @@ pub async fn stop_all_execution_processes( tracing::error!("Failed to update execution process status: {}", e); errors.push(format!("Failed to update process {} status", process.id)); } else { - // Create a new activity record to mark as stopped - let activity_id = Uuid::new_v4(); - let create_activity = CreateTaskAttemptActivity { - execution_process_id: process.id, - status: Some(TaskAttemptStatus::ExecutorFailed), - note: Some(format!( - "Execution process {:?} ({}) stopped by user", - process.process_type, process.id - )), - }; + // Create activity record for stopped processes (skip dev servers) + if !matches!( + process.process_type, + crate::models::execution_process::ExecutionProcessType::DevServer + ) { + let activity_id = Uuid::new_v4(); + let create_activity = CreateTaskAttemptActivity { + execution_process_id: process.id, + status: 
Some(TaskAttemptStatus::ExecutorFailed), + note: Some(format!( + "Execution process {:?} ({}) stopped by user", + process.process_type, process.id + )), + }; - if let Err(e) = TaskAttemptActivity::create( - &pool, - &create_activity, - activity_id, - TaskAttemptStatus::ExecutorFailed, - ) - .await - { - tracing::error!("Failed to create stopped activity: {}", e); - errors.push(format!( - "Failed to create activity for process {}", - process.id - )); + if let Err(e) = TaskAttemptActivity::create( + &pool, + &create_activity, + activity_id, + TaskAttemptStatus::ExecutorFailed, + ) + .await + { + tracing::error!("Failed to create stopped activity: {}", e); + errors.push(format!( + "Failed to create activity for process {}", + process.id + )); + } } } } @@ -673,27 +678,32 @@ pub async fn stop_execution_process( return Err(StatusCode::INTERNAL_SERVER_ERROR); } - // Create a new activity record to mark as stopped - let activity_id = Uuid::new_v4(); - let create_activity = CreateTaskAttemptActivity { - execution_process_id: process_id, - status: Some(TaskAttemptStatus::ExecutorFailed), - note: Some(format!( - "Execution process {:?} ({}) stopped by user", - process.process_type, process_id - )), - }; + // Create activity record for stopped processes (skip dev servers) + if !matches!( + process.process_type, + crate::models::execution_process::ExecutionProcessType::DevServer + ) { + let activity_id = Uuid::new_v4(); + let create_activity = CreateTaskAttemptActivity { + execution_process_id: process_id, + status: Some(TaskAttemptStatus::ExecutorFailed), + note: Some(format!( + "Execution process {:?} ({}) stopped by user", + process.process_type, process_id + )), + }; - if let Err(e) = TaskAttemptActivity::create( - &pool, - &create_activity, - activity_id, - TaskAttemptStatus::ExecutorFailed, - ) - .await - { - tracing::error!("Failed to create stopped activity: {}", e); - return Err(StatusCode::INTERNAL_SERVER_ERROR); + if let Err(e) = TaskAttemptActivity::create( + &pool, + &create_activity, + activity_id, + TaskAttemptStatus::ExecutorFailed, + ) + .await + { + tracing::error!("Failed to create stopped activity: {}", e); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } } Ok(ResponseJson(ApiResponse { @@ -793,6 +803,86 @@ pub async fn create_followup_attempt( } } +pub async fn start_dev_server( + Path((project_id, task_id, attempt_id)): Path<(Uuid, Uuid, Uuid)>, + Extension(pool): Extension, + Extension(app_state): Extension, +) -> Result>, StatusCode> { + // Verify task attempt exists and belongs to the correct task + match TaskAttempt::exists_for_task(&pool, attempt_id, task_id, project_id).await { + Ok(false) => return Err(StatusCode::NOT_FOUND), + Err(e) => { + tracing::error!("Failed to check task attempt existence: {}", e); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + Ok(true) => {} + } + + // Stop any existing dev servers for this project + let existing_dev_servers = + match ExecutionProcess::find_running_dev_servers_by_project(&pool, project_id).await { + Ok(servers) => servers, + Err(e) => { + tracing::error!( + "Failed to find running dev servers for project {}: {}", + project_id, + e + ); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + }; + + for dev_server in existing_dev_servers { + tracing::info!( + "Stopping existing dev server {} for project {}", + dev_server.id, + project_id + ); + + // Stop the running process + if let Err(e) = app_state.stop_running_execution_by_id(dev_server.id).await { + tracing::error!("Failed to stop dev server {}: {}", dev_server.id, 
e); + } else { + // Update the execution process status in the database + if let Err(e) = ExecutionProcess::update_completion( + &pool, + dev_server.id, + crate::models::execution_process::ExecutionProcessStatus::Killed, + None, + ) + .await + { + tracing::error!( + "Failed to update dev server {} status: {}", + dev_server.id, + e + ); + } + } + } + + // Start dev server execution + match TaskAttempt::start_dev_server(&pool, &app_state, attempt_id, task_id, project_id).await { + Ok(_) => Ok(ResponseJson(ApiResponse { + success: true, + data: None, + message: Some("Dev server started successfully".to_string()), + })), + Err(e) => { + tracing::error!( + "Failed to start dev server for task attempt {}: {}", + attempt_id, + e + ); + Ok(ResponseJson(ApiResponse { + success: false, + data: None, + message: Some(e.to_string()), + })) + } + } +} + pub fn task_attempts_router() -> Router { use axum::routing::post; @@ -850,4 +940,8 @@ pub fn task_attempts_router() -> Router { "/projects/:project_id/tasks/:task_id/attempts/:attempt_id/follow-up", post(create_followup_attempt), ) + .route( + "/projects/:project_id/tasks/:task_id/attempts/:attempt_id/start-dev-server", + post(start_dev_server), + ) } diff --git a/frontend/src/components/projects/project-form.tsx b/frontend/src/components/projects/project-form.tsx index 0f9c43f2..1279d373 100644 --- a/frontend/src/components/projects/project-form.tsx +++ b/frontend/src/components/projects/project-form.tsx @@ -32,6 +32,7 @@ export function ProjectForm({ const [name, setName] = useState(project?.name || ""); const [gitRepoPath, setGitRepoPath] = useState(project?.git_repo_path || ""); const [setupScript, setSetupScript] = useState(project?.setup_script ?? ""); + const [devScript, setDevScript] = useState(project?.dev_script ?? ""); const [loading, setLoading] = useState(false); const [error, setError] = useState(""); const [showFolderPicker, setShowFolderPicker] = useState(false); @@ -47,10 +48,12 @@ export function ProjectForm({ setName(project.name || ""); setGitRepoPath(project.git_repo_path || ""); setSetupScript(project.setup_script ?? ""); + setDevScript(project.dev_script ?? ""); } else { setName(""); setGitRepoPath(""); setSetupScript(""); + setDevScript(""); } }, [project]); @@ -90,6 +93,7 @@ export function ProjectForm({ name, git_repo_path: finalGitRepoPath, setup_script: setupScript.trim() || null, + dev_script: devScript.trim() || null, }; const response = await makeRequest( `/api/projects/${project.id}`, @@ -113,6 +117,7 @@ export function ProjectForm({ git_repo_path: finalGitRepoPath, use_existing_repo: repoMode === "existing", setup_script: setupScript.trim() || null, + dev_script: devScript.trim() || null, }; const response = await makeRequest("/api/projects", { method: "POST", @@ -147,10 +152,12 @@ export function ProjectForm({ setName(project.name || ""); setGitRepoPath(project.git_repo_path || ""); setSetupScript(project.setup_script ?? ""); + setDevScript(project.dev_script ?? ""); } else { setName(""); setGitRepoPath(""); setSetupScript(""); + setDevScript(""); } setParentPath(""); setFolderName(""); @@ -316,6 +323,22 @@ export function ProjectForm({

+
+ +