Setup script for restored worktrees (#125)
backend/.sqlx/query-1c7b06ba1e112abf6b945a2ff08a0b40ec23f3738c2e7399f067b558cf8d490e.json (generated, new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "db_name": "SQLite",
  "query": "UPDATE task_attempts SET setup_completed_at = datetime('now'), updated_at = datetime('now') WHERE id = ?",
  "describe": {
    "columns": [],
    "parameters": {
      "Right": 1
    },
    "nullable": []
  },
  "hash": "1c7b06ba1e112abf6b945a2ff08a0b40ec23f3738c2e7399f067b558cf8d490e"
}
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "UPDATE task_attempts SET worktree_path = $1, worktree_deleted = FALSE, updated_at = datetime('now') WHERE id = $2",
  "query": "UPDATE task_attempts SET worktree_path = $1, worktree_deleted = FALSE, setup_completed_at = NULL, updated_at = datetime('now') WHERE id = $2",
  "describe": {
    "columns": [],
    "parameters": {
@@ -8,5 +8,5 @@
    },
    "nullable": []
  },
  "hash": "bb11157b104cbf11180b2aa002b74fdba16f0980fc993381a5122b3119348e59"
  "hash": "5b902137b11022d2e1a5c4f6a9c83fec1a856c6a710aff831abd2382ede76b43"
}
@@ -1,98 +0,0 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT ta.id AS \"id!: Uuid\",\n ta.task_id AS \"task_id!: Uuid\",\n ta.worktree_path,\n ta.branch,\n ta.base_branch,\n ta.merge_commit,\n ta.executor,\n ta.pr_url,\n ta.pr_number,\n ta.pr_status,\n ta.pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n ta.worktree_deleted as \"worktree_deleted!: bool\",\n ta.created_at AS \"created_at!: DateTime<Utc>\",\n ta.updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts ta\n JOIN tasks t ON ta.task_id = t.id\n WHERE ta.id = $1\n AND t.id = $2\n AND t.project_id = $3\n ",
  "describe": {
    "columns": [
      {
        "name": "id!: Uuid",
        "ordinal": 0,
        "type_info": "Blob"
      },
      {
        "name": "task_id!: Uuid",
        "ordinal": 1,
        "type_info": "Blob"
      },
      {
        "name": "worktree_path",
        "ordinal": 2,
        "type_info": "Text"
      },
      {
        "name": "branch",
        "ordinal": 3,
        "type_info": "Text"
      },
      {
        "name": "base_branch",
        "ordinal": 4,
        "type_info": "Text"
      },
      {
        "name": "merge_commit",
        "ordinal": 5,
        "type_info": "Text"
      },
      {
        "name": "executor",
        "ordinal": 6,
        "type_info": "Text"
      },
      {
        "name": "pr_url",
        "ordinal": 7,
        "type_info": "Text"
      },
      {
        "name": "pr_number",
        "ordinal": 8,
        "type_info": "Integer"
      },
      {
        "name": "pr_status",
        "ordinal": 9,
        "type_info": "Text"
      },
      {
        "name": "pr_merged_at: DateTime<Utc>",
        "ordinal": 10,
        "type_info": "Datetime"
      },
      {
        "name": "worktree_deleted!: bool",
        "ordinal": 11,
        "type_info": "Bool"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "ordinal": 12,
        "type_info": "Text"
      },
      {
        "name": "updated_at!: DateTime<Utc>",
        "ordinal": 13,
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Right": 3
    },
    "nullable": [
      true,
      false,
      false,
      false,
      false,
      true,
      true,
      true,
      true,
      true,
      true,
      false,
      false,
      false
    ]
  },
  "hash": "6a0cbee771dc8467a391dbe5acfc0dd23869baaa3030b989cd527d873c26f2f6"
}
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "INSERT INTO task_attempts (id, task_id, worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)\n RETURNING id as \"id!: Uuid\", task_id as \"task_id!: Uuid\", worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at as \"pr_merged_at: DateTime<Utc>\", worktree_deleted as \"worktree_deleted!: bool\", created_at as \"created_at!: DateTime<Utc>\", updated_at as \"updated_at!: DateTime<Utc>\"",
  "query": "INSERT INTO task_attempts (id, task_id, worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted, setup_completed_at)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)\n RETURNING id as \"id!: Uuid\", task_id as \"task_id!: Uuid\", worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at as \"pr_merged_at: DateTime<Utc>\", worktree_deleted as \"worktree_deleted!: bool\", setup_completed_at as \"setup_completed_at: DateTime<Utc>\", created_at as \"created_at!: DateTime<Utc>\", updated_at as \"updated_at!: DateTime<Utc>\"",
  "describe": {
    "columns": [
      {
@@ -64,18 +64,23 @@
        "type_info": "Bool"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "name": "setup_completed_at: DateTime<Utc>",
        "ordinal": 12,
        "type_info": "Datetime"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "ordinal": 13,
        "type_info": "Text"
      },
      {
        "name": "updated_at!: DateTime<Utc>",
        "ordinal": 13,
        "ordinal": 14,
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Right": 12
      "Right": 13
    },
    "nullable": [
      true,
@@ -90,9 +95,10 @@
      true,
      true,
      false,
      true,
      false,
      false
    ]
  },
  "hash": "e1266b583bd1d164e76fb6a5a906dc229443ec6c437b8eab1c75c625e1cd1166"
  "hash": "6e8b860b14decfc2227dc57213f38442943d3fbef5c8418fd6b634c6e0f5e2ea"
}
@@ -1,98 +0,0 @@
{
  "db_name": "SQLite",
  "query": "SELECT ta.id AS \"id!: Uuid\",\n ta.task_id AS \"task_id!: Uuid\",\n ta.worktree_path,\n ta.branch,\n ta.base_branch,\n ta.merge_commit,\n ta.executor,\n ta.pr_url,\n ta.pr_number,\n ta.pr_status,\n ta.pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n ta.worktree_deleted AS \"worktree_deleted!: bool\",\n ta.created_at AS \"created_at!: DateTime<Utc>\",\n ta.updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts ta\n JOIN tasks t ON ta.task_id = t.id\n WHERE ta.id = $1 AND t.id = $2 AND t.project_id = $3",
  "describe": {
    "columns": [
      {
        "name": "id!: Uuid",
        "ordinal": 0,
        "type_info": "Blob"
      },
      {
        "name": "task_id!: Uuid",
        "ordinal": 1,
        "type_info": "Blob"
      },
      {
        "name": "worktree_path",
        "ordinal": 2,
        "type_info": "Text"
      },
      {
        "name": "branch",
        "ordinal": 3,
        "type_info": "Text"
      },
      {
        "name": "base_branch",
        "ordinal": 4,
        "type_info": "Text"
      },
      {
        "name": "merge_commit",
        "ordinal": 5,
        "type_info": "Text"
      },
      {
        "name": "executor",
        "ordinal": 6,
        "type_info": "Text"
      },
      {
        "name": "pr_url",
        "ordinal": 7,
        "type_info": "Text"
      },
      {
        "name": "pr_number",
        "ordinal": 8,
        "type_info": "Integer"
      },
      {
        "name": "pr_status",
        "ordinal": 9,
        "type_info": "Text"
      },
      {
        "name": "pr_merged_at: DateTime<Utc>",
        "ordinal": 10,
        "type_info": "Datetime"
      },
      {
        "name": "worktree_deleted!: bool",
        "ordinal": 11,
        "type_info": "Bool"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "ordinal": 12,
        "type_info": "Text"
      },
      {
        "name": "updated_at!: DateTime<Utc>",
        "ordinal": 13,
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Right": 3
    },
    "nullable": [
      true,
      false,
      false,
      false,
      false,
      true,
      true,
      true,
      true,
      true,
      true,
      false,
      false,
      false
    ]
  },
  "hash": "8a8e1a2a5156b6d0fce8dbfdf4d2fd19501c4246d9b362abc0c489ec0331bf4c"
}
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "SELECT ta.id AS \"id!: Uuid\",\n ta.task_id AS \"task_id!: Uuid\",\n ta.worktree_path,\n ta.branch,\n ta.base_branch,\n ta.merge_commit,\n ta.executor,\n ta.pr_url,\n ta.pr_number,\n ta.pr_status,\n ta.pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n ta.worktree_deleted AS \"worktree_deleted!: bool\",\n ta.created_at AS \"created_at!: DateTime<Utc>\",\n ta.updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts ta\n JOIN tasks t ON ta.task_id = t.id\n JOIN projects p ON t.project_id = p.id\n WHERE ta.id = $1 AND t.id = $2 AND p.id = $3",
  "query": "SELECT ta.id AS \"id!: Uuid\",\n ta.task_id AS \"task_id!: Uuid\",\n ta.worktree_path,\n ta.branch,\n ta.base_branch,\n ta.merge_commit,\n ta.executor,\n ta.pr_url,\n ta.pr_number,\n ta.pr_status,\n ta.pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n ta.worktree_deleted AS \"worktree_deleted!: bool\",\n ta.setup_completed_at AS \"setup_completed_at: DateTime<Utc>\",\n ta.created_at AS \"created_at!: DateTime<Utc>\",\n ta.updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts ta\n JOIN tasks t ON ta.task_id = t.id\n JOIN projects p ON t.project_id = p.id\n WHERE ta.id = $1 AND t.id = $2 AND p.id = $3",
  "describe": {
    "columns": [
      {
@@ -64,13 +64,18 @@
        "type_info": "Bool"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "name": "setup_completed_at: DateTime<Utc>",
        "ordinal": 12,
        "type_info": "Datetime"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "ordinal": 13,
        "type_info": "Text"
      },
      {
        "name": "updated_at!: DateTime<Utc>",
        "ordinal": 13,
        "ordinal": 14,
        "type_info": "Text"
      }
    ],
@@ -90,9 +95,10 @@
      true,
      true,
      false,
      true,
      false,
      false
    ]
  },
  "hash": "9b601854d9adaf1e30ad2d2bed4efc477446de19e61c273bddbc852e8a2eb990"
  "hash": "92e8bdbcd80c5ff3db7a35cf79492048803ef305cbdef0d0a1fe5dc881ca8c71"
}
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n worktree_path,\n branch,\n merge_commit,\n base_branch,\n executor,\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts\n WHERE id = $1",
  "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n worktree_path,\n branch,\n merge_commit,\n base_branch,\n executor,\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime<Utc>\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts\n WHERE id = $1",
  "describe": {
    "columns": [
      {
@@ -64,13 +64,18 @@
        "type_info": "Bool"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "name": "setup_completed_at: DateTime<Utc>",
        "ordinal": 12,
        "type_info": "Datetime"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "ordinal": 13,
        "type_info": "Text"
      },
      {
        "name": "updated_at!: DateTime<Utc>",
        "ordinal": 13,
        "ordinal": 14,
        "type_info": "Text"
      }
    ],
@@ -90,9 +95,10 @@
      true,
      true,
      false,
      true,
      false,
      false
    ]
  },
  "hash": "de6fc34ba5af4479c00e2c03307d430d4b681d6390747a1cbb1e0a02c5918beb"
  "hash": "a6d2961718dbc3b1a925e549f49a159c561bef58c105529275f274b27e2eba5b"
}
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n worktree_path,\n branch,\n base_branch,\n merge_commit,\n executor,\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts\n WHERE task_id = $1\n ORDER BY created_at DESC",
  "query": "SELECT id AS \"id!: Uuid\",\n task_id AS \"task_id!: Uuid\",\n worktree_path,\n branch,\n base_branch,\n merge_commit,\n executor,\n pr_url,\n pr_number,\n pr_status,\n pr_merged_at AS \"pr_merged_at: DateTime<Utc>\",\n worktree_deleted AS \"worktree_deleted!: bool\",\n setup_completed_at AS \"setup_completed_at: DateTime<Utc>\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM task_attempts\n WHERE task_id = $1\n ORDER BY created_at DESC",
  "describe": {
    "columns": [
      {
@@ -64,13 +64,18 @@
        "type_info": "Bool"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "name": "setup_completed_at: DateTime<Utc>",
        "ordinal": 12,
        "type_info": "Datetime"
      },
      {
        "name": "created_at!: DateTime<Utc>",
        "ordinal": 13,
        "type_info": "Text"
      },
      {
        "name": "updated_at!: DateTime<Utc>",
        "ordinal": 13,
        "ordinal": 14,
        "type_info": "Text"
      }
    ],
@@ -90,9 +95,10 @@
      true,
      true,
      false,
      true,
      false,
      false
    ]
  },
  "hash": "430f8b4d013ce5bf1bd93242eea086c60e0125d76f9ffa6445addd155ec41059"
  "hash": "a9e93d5b09b29faf66e387e4d7596a792d81e75c4d3726e83c2963e8d7c9b56f"
}
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT ta.id as \"attempt_id!: Uuid\", ta.worktree_path, p.git_repo_path as \"git_repo_path!\"\n FROM task_attempts ta\n JOIN execution_processes ep ON ta.id = ep.task_attempt_id\n JOIN tasks t ON ta.task_id = t.id\n JOIN projects p ON t.project_id = p.id\n WHERE ep.completed_at IS NOT NULL\n AND ta.worktree_deleted = FALSE\n GROUP BY ta.id, ta.worktree_path, p.git_repo_path\n HAVING datetime('now', '-24 hours') > datetime(MAX(ep.completed_at))\n AND ta.id NOT IN (\n SELECT DISTINCT ep2.task_attempt_id\n FROM execution_processes ep2\n WHERE ep2.completed_at IS NULL\n )\n ORDER BY MAX(ep.completed_at) ASC\n ",
  "query": "\n SELECT ta.id as \"attempt_id!: Uuid\", ta.worktree_path, p.git_repo_path as \"git_repo_path!\"\n FROM task_attempts ta\n JOIN execution_processes ep ON ta.id = ep.task_attempt_id\n JOIN tasks t ON ta.task_id = t.id\n JOIN projects p ON t.project_id = p.id\n WHERE ep.completed_at IS NOT NULL\n AND ta.worktree_deleted = FALSE\n GROUP BY ta.id, ta.worktree_path, p.git_repo_path\n HAVING datetime('now', '-1 minutes') > datetime(MAX(ep.completed_at))\n AND ta.id NOT IN (\n SELECT DISTINCT ep2.task_attempt_id\n FROM execution_processes ep2\n WHERE ep2.completed_at IS NULL\n )\n ORDER BY MAX(ep.completed_at) ASC\n ",
  "describe": {
    "columns": [
      {
@@ -28,5 +28,5 @@
      true
    ]
  },
  "hash": "60044d33d4ac014a20f12c53610309814a268b417f25bc6dcde5d3eda13ce0ba"
  "hash": "deb5c6ffadf9e67460c37d82520da032bc472ff6d0bd658925b312b18c090f1a"
}
@@ -0,0 +1,3 @@
-- Add setup completion tracking to task_attempts table
-- This enables automatic setup script execution for recreated worktrees
ALTER TABLE task_attempts ADD COLUMN setup_completed_at DATETIME;
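
Because ALTER TABLE adds the column as NULL for every existing row, restored worktrees and attempts that predate this migration read as "setup not completed" until the setup script finishes again. A minimal sketch of checking that state at runtime; the helper name and error handling are illustrative and not part of this commit (assumes sqlx with the SQLite driver and uuid support):

    // Hypothetical helper mirroring the "setup_completed_at IS NULL" semantics
    // introduced by this migration.
    async fn needs_setup(pool: &sqlx::SqlitePool, attempt_id: uuid::Uuid) -> Result<bool, sqlx::Error> {
        let completed_at: Option<String> =
            sqlx::query_scalar("SELECT setup_completed_at FROM task_attempts WHERE id = ?")
                .bind(attempt_id)
                .fetch_one(pool)
                .await?;
        Ok(completed_at.is_none())
    }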
@@ -13,6 +13,122 @@ use crate::{
    utils::worktree_manager::WorktreeManager,
};

/// Delegation context structure
#[derive(Debug, serde::Deserialize)]
struct DelegationContext {
    delegate_to: String,
    operation_params: DelegationOperationParams,
}

#[derive(Debug, serde::Deserialize)]
struct DelegationOperationParams {
    task_id: uuid::Uuid,
    project_id: uuid::Uuid,
    attempt_id: uuid::Uuid,
    additional: Option<serde_json::Value>,
}

/// Parse delegation context from process args JSON
fn parse_delegation_context(args_json: &str) -> Option<DelegationContext> {
    // Parse the args JSON array
    if let Ok(args_array) = serde_json::from_str::<serde_json::Value>(args_json) {
        if let Some(args) = args_array.as_array() {
            // Look for --delegation-context flag
            for (i, arg) in args.iter().enumerate() {
                if let Some(arg_str) = arg.as_str() {
                    if arg_str == "--delegation-context" && i + 1 < args.len() {
                        // Next argument should be the delegation context JSON
                        if let Some(context_str) = args[i + 1].as_str() {
                            if let Ok(context) =
                                serde_json::from_str::<DelegationContext>(context_str)
                            {
                                return Some(context);
                            }
                        }
                    }
                }
            }
        }
    }
    None
}
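
For illustration, the args this parser expects mirror what create_execution_process_record_with_delegation (further down in this diff) stores on the process record. A sketch; the "-c" shell flag is a placeholder for whatever get_shell_command() returns, and serde support on uuid::Uuid is assumed:

    // Build an args array the way the process record stores it, then parse it back.
    let context = serde_json::json!({
        "delegate_to": "coding_agent",
        "operation_params": {
            "task_id": uuid::Uuid::new_v4(),
            "project_id": uuid::Uuid::new_v4(),
            "attempt_id": uuid::Uuid::new_v4(),
            "additional": null
        }
    });
    // The context travels as a single JSON string following the flag.
    let args = serde_json::json!(["-c", "setup_script", "--delegation-context", context.to_string()]);
    assert_eq!(
        parse_delegation_context(&args.to_string()).map(|c| c.delegate_to),
        Some("coding_agent".to_string())
    );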

/// Handle delegation after setup completion
async fn handle_setup_delegation(app_state: &AppState, delegation_context: DelegationContext) {
    let params = &delegation_context.operation_params;
    let task_id = params.task_id;
    let project_id = params.project_id;
    let attempt_id = params.attempt_id;

    tracing::info!(
        "Delegating to {} after setup completion for attempt {}",
        delegation_context.delegate_to,
        attempt_id
    );

    let result = match delegation_context.delegate_to.as_str() {
        "dev_server" => {
            ProcessService::start_dev_server_direct(
                &app_state.db_pool,
                app_state,
                attempt_id,
                task_id,
                project_id,
            )
            .await
        }
        "coding_agent" => {
            ProcessService::start_coding_agent(
                &app_state.db_pool,
                app_state,
                attempt_id,
                task_id,
                project_id,
            )
            .await
        }
        "followup" => {
            let prompt = params
                .additional
                .as_ref()
                .and_then(|a| a.get("prompt"))
                .and_then(|p| p.as_str())
                .unwrap_or("");

            ProcessService::start_followup_execution_direct(
                &app_state.db_pool,
                app_state,
                attempt_id,
                task_id,
                project_id,
                prompt,
            )
            .await
            .map(|_| ())
        }
        _ => {
            tracing::error!(
                "Unknown delegation target: {}",
                delegation_context.delegate_to
            );
            return;
        }
    };

    if let Err(e) = result {
        tracing::error!(
            "Failed to delegate to {} after setup completion: {}",
            delegation_context.delegate_to,
            e
        );
    } else {
        tracing::info!(
            "Successfully delegated to {} after setup completion",
            delegation_context.delegate_to
        );
    }
}
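
Condensed, the hand-off the monitor performs after a successful setup run (the full call site is in handle_setup_completion below) amounts to:

    // Sketch: read the stored args and resume the original operation
    // when a delegation context is present.
    if let Some(ctx) = execution_process.args.as_deref().and_then(parse_delegation_context) {
        handle_setup_delegation(app_state, ctx).await;
    }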

/// Commit any unstaged changes in the worktree after execution completion
async fn commit_execution_changes(
    worktree_path: &str,
@@ -651,7 +767,7 @@ async fn handle_setup_completion(
    app_state: &AppState,
    task_attempt_id: Uuid,
    execution_process_id: Uuid,
    _execution_process: ExecutionProcess,
    execution_process: ExecutionProcess,
    success: bool,
    exit_code: Option<i64>,
) {
@@ -662,6 +778,16 @@ async fn handle_setup_completion(
    };

    if success {
        // Mark setup as completed in database
        if let Err(e) = TaskAttempt::mark_setup_completed(&app_state.db_pool, task_attempt_id).await
        {
            tracing::error!(
                "Failed to mark setup as completed for attempt {}: {}",
                task_attempt_id,
                e
            );
        }

        // Setup completed successfully, create activity
        let activity_id = Uuid::new_v4();
        let create_activity = CreateTaskAttemptActivity {
@@ -682,23 +808,39 @@ async fn handle_setup_completion(
            return;
        }

        // Get task and project info to start coding agent
        if let Ok(Some(task_attempt)) =
            TaskAttempt::find_by_id(&app_state.db_pool, task_attempt_id).await
        {
            if let Ok(Some(task)) = Task::find_by_id(&app_state.db_pool, task_attempt.task_id).await
        // Check for delegation context in process args
        let delegation_result = if let Some(args_json) = &execution_process.args {
            parse_delegation_context(args_json)
        } else {
            None
        };

        if let Some(delegation_context) = delegation_result {
            // Delegate to the original operation
            handle_setup_delegation(app_state, delegation_context).await;
        } else {
            // Fallback to original behavior - start coding agent
            if let Ok(Some(task_attempt)) =
                TaskAttempt::find_by_id(&app_state.db_pool, task_attempt_id).await
            {
                // Start the coding agent
                if let Err(e) = ProcessService::start_coding_agent(
                    &app_state.db_pool,
                    app_state,
                    task_attempt_id,
                    task.id,
                    task.project_id,
                )
                .await
                if let Ok(Some(task)) =
                    Task::find_by_id(&app_state.db_pool, task_attempt.task_id).await
                {
                    tracing::error!("Failed to start coding agent after setup completion: {}", e);
                    // Start the coding agent
                    if let Err(e) = ProcessService::start_coding_agent(
                        &app_state.db_pool,
                        app_state,
                        task_attempt_id,
                        task.id,
                        task.project_id,
                    )
                    .await
                    {
                        tracing::error!(
                            "Failed to start coding agent after setup completion: {}",
                            e
                        );
                    }
                }
            }
        }
@@ -93,6 +93,7 @@ pub struct TaskAttempt {
    pub pr_status: Option<String>, // open, closed, merged
    pub pr_merged_at: Option<DateTime<Utc>>, // When PR was merged
    pub worktree_deleted: bool, // Flag indicating if worktree has been cleaned up
    pub setup_completed_at: Option<DateTime<Utc>>, // When setup script was last completed
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
@@ -220,6 +221,7 @@ impl TaskAttempt {
                ta.pr_status,
                ta.pr_merged_at AS "pr_merged_at: DateTime<Utc>",
                ta.worktree_deleted AS "worktree_deleted!: bool",
                ta.setup_completed_at AS "setup_completed_at: DateTime<Utc>",
                ta.created_at AS "created_at!: DateTime<Utc>",
                ta.updated_at AS "updated_at!: DateTime<Utc>"
            FROM task_attempts ta
@@ -293,6 +295,7 @@ impl TaskAttempt {
                pr_status,
                pr_merged_at AS "pr_merged_at: DateTime<Utc>",
                worktree_deleted AS "worktree_deleted!: bool",
                setup_completed_at AS "setup_completed_at: DateTime<Utc>",
                created_at AS "created_at!: DateTime<Utc>",
                updated_at AS "updated_at!: DateTime<Utc>"
            FROM task_attempts
@@ -321,6 +324,7 @@ impl TaskAttempt {
                pr_status,
                pr_merged_at AS "pr_merged_at: DateTime<Utc>",
                worktree_deleted AS "worktree_deleted!: bool",
                setup_completed_at AS "setup_completed_at: DateTime<Utc>",
                created_at AS "created_at!: DateTime<Utc>",
                updated_at AS "updated_at!: DateTime<Utc>"
            FROM task_attempts
@@ -370,7 +374,7 @@ impl TaskAttempt {
            WHERE ep.completed_at IS NOT NULL
            AND ta.worktree_deleted = FALSE
            GROUP BY ta.id, ta.worktree_path, p.git_repo_path
            HAVING datetime('now', '-24 hours') > datetime(MAX(ep.completed_at))
            HAVING datetime('now', '-1 minutes') > datetime(MAX(ep.completed_at))
            AND ta.id NOT IN (
                SELECT DISTINCT ep2.task_attempt_id
                FROM execution_processes ep2
@@ -442,9 +446,9 @@ impl TaskAttempt {
        // Insert the record into the database
        Ok(sqlx::query_as!(
            TaskAttempt,
            r#"INSERT INTO task_attempts (id, task_id, worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
            RETURNING id as "id!: Uuid", task_id as "task_id!: Uuid", worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at as "pr_merged_at: DateTime<Utc>", worktree_deleted as "worktree_deleted!: bool", created_at as "created_at!: DateTime<Utc>", updated_at as "updated_at!: DateTime<Utc>""#,
            r#"INSERT INTO task_attempts (id, task_id, worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at, worktree_deleted, setup_completed_at)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            RETURNING id as "id!: Uuid", task_id as "task_id!: Uuid", worktree_path, branch, base_branch, merge_commit, executor, pr_url, pr_number, pr_status, pr_merged_at as "pr_merged_at: DateTime<Utc>", worktree_deleted as "worktree_deleted!: bool", setup_completed_at as "setup_completed_at: DateTime<Utc>", created_at as "created_at!: DateTime<Utc>", updated_at as "updated_at!: DateTime<Utc>""#,
            attempt_id,
            task_id,
            worktree_path_str,
@@ -456,7 +460,8 @@ impl TaskAttempt {
            Option::<i64>::None, // pr_number is None during creation
            Option::<String>::None, // pr_status is None during creation
            Option::<DateTime<Utc>>::None, // pr_merged_at is None during creation
            false // worktree_deleted is false during creation
            false, // worktree_deleted is false during creation
            Option::<DateTime<Utc>>::None // setup_completed_at is None during creation
        )
        .fetch_one(pool)
        .await?)
@@ -607,9 +612,9 @@ impl TaskAttempt {
        let new_worktree_path =
            Self::recreate_worktree_from_branch(pool, &task_attempt, project_id).await?;

        // Update database with new path and reset worktree_deleted flag
        // Update database with new path, reset worktree_deleted flag, and clear setup completion
        sqlx::query!(
            "UPDATE task_attempts SET worktree_path = $1, worktree_deleted = FALSE, updated_at = datetime('now') WHERE id = $2",
            "UPDATE task_attempts SET worktree_path = $1, worktree_deleted = FALSE, setup_completed_at = NULL, updated_at = datetime('now') WHERE id = $2",
            new_worktree_path,
            attempt_id
        )
@@ -1005,4 +1010,31 @@ impl TaskAttempt {
            coding_agent_process_id: coding_agent_process.map(|p| p.id.to_string()),
        })
    }

    /// Check if setup script has been completed for this worktree
    pub async fn is_setup_completed(
        pool: &SqlitePool,
        attempt_id: Uuid,
    ) -> Result<bool, TaskAttemptError> {
        let task_attempt = Self::find_by_id(pool, attempt_id)
            .await?
            .ok_or(TaskAttemptError::TaskNotFound)?;

        Ok(task_attempt.setup_completed_at.is_some())
    }

    /// Mark setup script as completed for this worktree
    pub async fn mark_setup_completed(
        pool: &SqlitePool,
        attempt_id: Uuid,
    ) -> Result<(), TaskAttemptError> {
        sqlx::query!(
            "UPDATE task_attempts SET setup_completed_at = datetime('now'), updated_at = datetime('now') WHERE id = ?",
            attempt_id
        )
        .execute(pool)
        .await?;

        Ok(())
    }
}
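
Together these two helpers give callers a check-then-run pattern. A minimal sketch of the intended sequence, assuming a pool: &SqlitePool and attempt_id: Uuid in scope (the setup-script invocation itself is elided):

    // Run the setup script at most once per (re)created worktree.
    if !TaskAttempt::is_setup_completed(pool, attempt_id).await? {
        // ... execute the project's setup script here ...
        TaskAttempt::mark_setup_completed(pool, attempt_id).await?;
    }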

@@ -19,6 +19,151 @@ use crate::{
pub struct ProcessService;

impl ProcessService {
    /// Automatically run setup if needed, then continue with the specified operation
    pub async fn auto_setup_and_execute(
        pool: &SqlitePool,
        app_state: &crate::app_state::AppState,
        attempt_id: Uuid,
        task_id: Uuid,
        project_id: Uuid,
        operation: &str, // "dev_server", "coding_agent", or "followup"
        operation_params: Option<serde_json::Value>,
    ) -> Result<(), TaskAttemptError> {
        // Check if setup is completed for this worktree
        let setup_completed = TaskAttempt::is_setup_completed(pool, attempt_id).await?;

        // Get project to check if setup script exists
        let project = Project::find_by_id(pool, project_id)
            .await?
            .ok_or(TaskAttemptError::ProjectNotFound)?;

        let needs_setup = Self::should_run_setup_script(&project) && !setup_completed;

        if needs_setup {
            // Run setup with delegation to the original operation
            Self::execute_setup_with_delegation(
                pool,
                app_state,
                attempt_id,
                task_id,
                project_id,
                operation,
                operation_params,
            )
            .await
        } else {
            // Setup not needed or already completed, continue with original operation
            match operation {
                "dev_server" => {
                    Self::start_dev_server_direct(pool, app_state, attempt_id, task_id, project_id)
                        .await
                }
                "coding_agent" => {
                    Self::start_coding_agent(pool, app_state, attempt_id, task_id, project_id).await
                }
                "followup" => {
                    let prompt = operation_params
                        .as_ref()
                        .and_then(|p| p.get("prompt"))
                        .and_then(|p| p.as_str())
                        .unwrap_or("");
                    Self::start_followup_execution_direct(
                        pool, app_state, attempt_id, task_id, project_id, prompt,
                    )
                    .await
                    .map(|_| ())
                }
                _ => Err(TaskAttemptError::ValidationError(format!(
                    "Unknown operation: {}",
                    operation
                ))),
            }
        }
    }
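
The public entry points below funnel into this method; a follow-up run, for example, reaches it roughly like this (a sketch mirroring the call made in start_followup_execution later in this diff):

    // Run setup first if the worktree still needs it, then start the follow-up executor.
    ProcessService::auto_setup_and_execute(
        pool,
        app_state,
        attempt_id,
        task_id,
        project_id,
        "followup",
        Some(serde_json::json!({ "prompt": prompt })),
    )
    .await?;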

    /// Execute setup script with delegation context for continuing after completion
    async fn execute_setup_with_delegation(
        pool: &SqlitePool,
        app_state: &crate::app_state::AppState,
        attempt_id: Uuid,
        task_id: Uuid,
        project_id: Uuid,
        delegate_to: &str,
        operation_params: Option<serde_json::Value>,
    ) -> Result<(), TaskAttemptError> {
        let (task_attempt, project) =
            Self::load_execution_context(pool, attempt_id, project_id).await?;

        // Create delegation context for execution monitor
        let delegation_context = serde_json::json!({
            "delegate_to": delegate_to,
            "operation_params": {
                "task_id": task_id,
                "project_id": project_id,
                "attempt_id": attempt_id,
                "additional": operation_params
            }
        });

        // Create modified setup script execution with delegation context in args
        let setup_script = project.setup_script.as_ref().unwrap();
        let process_id = Uuid::new_v4();

        // Create execution process record with delegation context
        let _execution_process = Self::create_execution_process_record_with_delegation(
            pool,
            attempt_id,
            process_id,
            setup_script,
            &task_attempt.worktree_path,
            delegation_context,
        )
        .await?;

        // Create activity record
        Self::create_activity_record(
            pool,
            process_id,
            TaskAttemptStatus::SetupRunning,
            "Starting setup script with delegation",
        )
        .await?;

        tracing::info!(
            "Starting setup script with delegation to {} for task attempt {}",
            delegate_to,
            attempt_id
        );

        // Execute the setup script
        let child = Self::execute_setup_script_process(
            setup_script,
            pool,
            task_id,
            attempt_id,
            process_id,
            &task_attempt.worktree_path,
        )
        .await?;

        // Register for monitoring
        Self::register_for_monitoring(
            app_state,
            process_id,
            attempt_id,
            &ExecutionProcessType::SetupScript,
            child,
        )
        .await;

        tracing::info!(
            "Started setup execution with delegation {} for task attempt {}",
            process_id,
            attempt_id
        );
        Ok(())
    }

    /// Start the execution flow for a task attempt (setup script + executor)
    pub async fn start_execution(
        pool: &SqlitePool,
@@ -80,13 +225,38 @@ impl ProcessService {
        .await
    }

    /// Start a dev server for this task attempt
    /// Start a dev server for this task attempt (with automatic setup)
    pub async fn start_dev_server(
        pool: &SqlitePool,
        app_state: &crate::app_state::AppState,
        attempt_id: Uuid,
        task_id: Uuid,
        project_id: Uuid,
    ) -> Result<(), TaskAttemptError> {
        // Ensure worktree exists (recreate if needed for cold task support)
        let _worktree_path =
            TaskAttempt::ensure_worktree_exists(pool, attempt_id, project_id, "dev server").await?;

        // Use automatic setup logic
        Self::auto_setup_and_execute(
            pool,
            app_state,
            attempt_id,
            task_id,
            project_id,
            "dev_server",
            None,
        )
        .await
    }

    /// Start a dev server directly without setup check (internal method)
    pub async fn start_dev_server_direct(
        pool: &SqlitePool,
        app_state: &crate::app_state::AppState,
        attempt_id: Uuid,
        task_id: Uuid,
        project_id: Uuid,
    ) -> Result<(), TaskAttemptError> {
        // Ensure worktree exists (recreate if needed for cold task support)
        let worktree_path =
@@ -138,7 +308,7 @@ impl ProcessService {
        result
    }

    /// Start a follow-up execution using the same executor type as the first process
    /// Start a follow-up execution using the same executor type as the first process (with automatic setup)
    /// Returns the attempt_id that was actually used (always the original attempt_id for session continuity)
    pub async fn start_followup_execution(
        pool: &SqlitePool,
@@ -174,10 +344,43 @@ impl ProcessService {

        // Ensure worktree exists (recreate if needed for cold task support)
        // This will resurrect the worktree at the exact same path for session continuity
        let worktree_path =
        let _worktree_path =
            TaskAttempt::ensure_worktree_exists(pool, actual_attempt_id, project_id, "followup")
                .await?;

        // Use automatic setup logic with followup parameters
        let operation_params = serde_json::json!({
            "prompt": prompt
        });

        Self::auto_setup_and_execute(
            pool,
            app_state,
            attempt_id,
            task_id,
            project_id,
            "followup",
            Some(operation_params),
        )
        .await?;

        Ok(actual_attempt_id)
    }

    /// Start a follow-up execution directly without setup check (internal method)
    pub async fn start_followup_execution_direct(
        pool: &SqlitePool,
        app_state: &crate::app_state::AppState,
        attempt_id: Uuid,
        task_id: Uuid,
        project_id: Uuid,
        prompt: &str,
    ) -> Result<Uuid, TaskAttemptError> {
        // Ensure worktree exists (recreate if needed for cold task support)
        // This will resurrect the worktree at the exact same path for session continuity
        let worktree_path =
            TaskAttempt::ensure_worktree_exists(pool, attempt_id, project_id, "followup").await?;

        // Find the most recent coding agent execution process to get the executor type
        // Look up processes from the ORIGINAL attempt to find the session
        let execution_processes =
@@ -261,7 +464,7 @@ impl ProcessService {
        let execution_result = Self::start_process_execution(
            pool,
            app_state,
            actual_attempt_id,
            attempt_id,
            task_id,
            followup_executor,
            "Starting follow-up executor".to_string(),
@@ -287,7 +490,7 @@ impl ProcessService {
        Self::start_process_execution(
            pool,
            app_state,
            actual_attempt_id,
            attempt_id,
            task_id,
            new_session_executor,
            "Starting new executor session (follow-up session failed)".to_string(),
@@ -301,7 +504,7 @@ impl ProcessService {
            execution_result?;
        }

        Ok(actual_attempt_id)
        Ok(attempt_id)
    }

    /// Unified function to start any type of process execution
@@ -683,4 +886,58 @@ impl ProcessService {
        )
        .await;
    }

    /// Create execution process database record with delegation context
    async fn create_execution_process_record_with_delegation(
        pool: &SqlitePool,
        attempt_id: Uuid,
        process_id: Uuid,
        _setup_script: &str,
        worktree_path: &str,
        delegation_context: serde_json::Value,
    ) -> Result<ExecutionProcess, TaskAttemptError> {
        let (shell_cmd, shell_arg) = get_shell_command();

        // Store delegation context in args for execution monitor to read
        let args_with_delegation = serde_json::json!([
            shell_arg,
            "setup_script",
            "--delegation-context",
            delegation_context.to_string()
        ]);

        let create_process = CreateExecutionProcess {
            task_attempt_id: attempt_id,
            process_type: ExecutionProcessType::SetupScript,
            executor_type: None, // Setup scripts don't have an executor type
            command: shell_cmd.to_string(),
            args: Some(args_with_delegation.to_string()),
            working_directory: worktree_path.to_string(),
        };

        ExecutionProcess::create(pool, &create_process, process_id)
            .await
            .map_err(TaskAttemptError::from)
    }

    /// Execute setup script process specifically
    async fn execute_setup_script_process(
        setup_script: &str,
        pool: &SqlitePool,
        task_id: Uuid,
        attempt_id: Uuid,
        process_id: Uuid,
        worktree_path: &str,
    ) -> Result<command_group::AsyncGroupChild, TaskAttemptError> {
        use crate::executors::SetupScriptExecutor;

        let executor = SetupScriptExecutor {
            script: setup_script.to_string(),
        };

        executor
            .execute_streaming(pool, task_id, attempt_id, process_id, worktree_path)
            .await
            .map_err(|e| TaskAttemptError::Git(git2::Error::from_str(&e.to_string())))
    }
}