Add session-scoped routes for follow-up and queue operations (Vibe Kanban) (#1606)
* Resolve all cherry-pick conflicts:
  1. `sessions/queue.rs` - fixed analytics to use `session.id` and `session.workspace_id` (2 locations)
  2. `task_attempts.rs` - removed the old `follow_up()` function that had already moved to `sessions/mod.rs`
  3. `sessions/mod.rs` - switched to `find_latest_coding_agent_turn_session_id(pool, session.id)` instead of a non-existent method

  No conflict markers remain in the routes directory. The session-based `follow_up` now scopes the `agent_session_id` lookup to the current session rather than across all workspace sessions.

* Convert the remaining workspace-scoped `agent_session_id` lookups to session-scoped lookups:
  1. `container.rs` - now calls `find_latest_coding_agent_turn_session_id(pool, ctx.session.id)`
  2. `pr.rs` - now calls `find_latest_coding_agent_turn_session_id(pool, session.id)`
  3. `execution_process.rs` - deleted the now-unused `find_latest_agent_session_id_by_workspace`

* Cleanup script changes for task attempt afc116fb-e639-4150-a299-b05c1f14f61e: removed the unnecessary `Session::create` call; the follow-up path now uses `&ctx.session` directly. Lint, db.

* Simplify the retry validation: removed the extra `Session::find_by_id` DB call, check `process.session_id != session.id` directly, and updated the error message to "Process does not belong to this session".
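For API consumers, the practical effect is that follow-up and queue operations now live under `/api/sessions/{session_id}/…` rather than `/api/task-attempts/{id}/…`. Below is a minimal sketch of how a frontend caller might drive the new endpoints, based on the `sessionsApi` / `queueApi` client changes in this diff; the `sendFollowUp` helper itself and the choice of `sessions[0]` as the "latest" session are illustrative assumptions, not part of the change.

```ts
// Sketch only: exercising the session-scoped routes added in this PR.
// `sessionsApi`, `queueApi`, and their methods come from the updated
// frontend api client in this diff; `sendFollowUp` is a hypothetical helper.
import { sessionsApi, queueApi } from '@/lib/api';

async function sendFollowUp(workspaceId: string, prompt: string) {
  // Sessions are still listed per workspace: GET /api/sessions?workspace_id=...
  const sessions = await sessionsApi.getByWorkspace(workspaceId);
  const session = sessions[0];
  if (!session) return;

  // Follow-up is now session-scoped: POST /api/sessions/{session_id}/follow-up
  const process = await sessionsApi.followUp(session.id, {
    prompt,
    variant: null,
    retry_process_id: null,
    force_when_dirty: null,
    perform_git_reset: null,
  });

  // The queue endpoints moved with it: GET /api/sessions/{session_id}/queue
  const status = await queueApi.getStatus(session.id);
  return { process, status };
}
```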
@@ -1,20 +0,0 @@
{
  "db_name": "SQLite",
  "query": "SELECT cat.agent_session_id\n FROM execution_processes ep\n JOIN coding_agent_turns cat ON ep.id = cat.execution_process_id\n JOIN sessions s ON ep.session_id = s.id\n WHERE s.workspace_id = $1\n AND ep.run_reason = 'codingagent'\n AND ep.dropped = FALSE\n AND cat.agent_session_id IS NOT NULL\n ORDER BY ep.created_at DESC\n LIMIT 1",
  "describe": {
    "columns": [
      {
        "name": "agent_session_id",
        "ordinal": 0,
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Right": 1
    },
    "nullable": [
      true
    ]
  },
  "hash": "b74d74a66fa902092d8ccbcb68dc22954b7f2a59f21b79c15cd6159654f4a8b5"
}
@@ -629,12 +629,12 @@ impl ExecutionProcess {
})
}

/// Fetch the latest CodingAgent executor profile for a workspace (across all sessions)
pub async fn latest_executor_profile_for_workspace(
/// Fetch the latest CodingAgent executor profile for a session
pub async fn latest_executor_profile_for_session(
pool: &SqlitePool,
workspace_id: Uuid,
session_id: Uuid,
) -> Result<ExecutorProfileId, ExecutionProcessError> {
// Find the latest CodingAgent execution process across all sessions for this workspace
// Find the latest CodingAgent execution process for this session
let latest_execution_process = sqlx::query_as!(
ExecutionProcess,
r#"SELECT
@@ -650,10 +650,9 @@ impl ExecutionProcess {
ep.created_at as "created_at!: DateTime<Utc>",
ep.updated_at as "updated_at!: DateTime<Utc>"
FROM execution_processes ep
JOIN sessions s ON ep.session_id = s.id
WHERE s.workspace_id = ? AND ep.run_reason = ? AND ep.dropped = FALSE
WHERE ep.session_id = ? AND ep.run_reason = ? AND ep.dropped = FALSE
ORDER BY ep.created_at DESC LIMIT 1"#,
workspace_id,
session_id,
ExecutionProcessRunReason::CodingAgent
)
.fetch_optional(pool)
@@ -680,34 +679,4 @@ impl ExecutionProcess {
)),
}
}

/// Find latest coding_agent_turn agent_session_id by workspace (across all sessions)
pub async fn find_latest_agent_session_id_by_workspace(
pool: &SqlitePool,
workspace_id: Uuid,
) -> Result<Option<String>, sqlx::Error> {
tracing::info!(
"Finding latest coding agent turn session id for workspace {}",
workspace_id
);
let row = sqlx::query!(
r#"SELECT cat.agent_session_id
FROM execution_processes ep
JOIN coding_agent_turns cat ON ep.id = cat.execution_process_id
JOIN sessions s ON ep.session_id = s.id
WHERE s.workspace_id = $1
AND ep.run_reason = 'codingagent'
AND ep.dropped = FALSE
AND cat.agent_session_id IS NOT NULL
ORDER BY ep.created_at DESC
LIMIT 1"#,
workspace_id
)
.fetch_optional(pool)
.await?;

tracing::info!("Latest coding agent turn session id: {:?}", row);

Ok(row.and_then(|r| r.agent_session_id))
}
}

@@ -20,7 +20,6 @@ use db::{
project_repo::ProjectRepo,
repo::Repo,
scratch::{DraftFollowUpData, Scratch, ScratchType},
session::{CreateSession, Session},
task::{Task, TaskStatus},
workspace::Workspace,
workspace_repo::WorkspaceRepo,
@@ -742,13 +741,13 @@ impl LocalContainerService {
ctx: &ExecutionContext,
queued_data: &DraftFollowUpData,
) -> Result<ExecutionProcess, ContainerError> {
// Get executor profile from the latest CodingAgent process
let initial_executor_profile_id = ExecutionProcess::latest_executor_profile_for_workspace(
&self.db.pool,
ctx.workspace.id,
)
// Get executor profile from the latest CodingAgent process in this session
let initial_executor_profile_id =
ExecutionProcess::latest_executor_profile_for_session(&self.db.pool, ctx.session.id)
.await
.map_err(|e| ContainerError::Other(anyhow!("Failed to get executor profile: {e}")))?;
.map_err(|e| {
ContainerError::Other(anyhow!("Failed to get executor profile: {e}"))
})?;

let executor_profile_id = ExecutorProfileId {
executor: initial_executor_profile_id.executor,
@@ -756,9 +755,9 @@ impl LocalContainerService {
};

// Get latest agent session ID for session continuity (from coding agent turns)
let latest_agent_session_id = ExecutionProcess::find_latest_agent_session_id_by_workspace(
let latest_agent_session_id = ExecutionProcess::find_latest_coding_agent_turn_session_id(
&self.db.pool,
ctx.workspace.id,
ctx.session.id,
)
.await?;

@@ -781,20 +780,9 @@ impl LocalContainerService {

let action = ExecutorAction::new(action_type, cleanup_action.map(Box::new));

// Create a new session for this follow-up
let session = Session::create(
&self.db.pool,
&CreateSession {
executor: Some(executor_profile_id.to_string()),
},
Uuid::new_v4(),
ctx.workspace.id,
)
.await?;

self.start_execution(
&ctx.workspace,
&session,
&ctx.session,
&action,
&ExecutionProcessRunReason::CodingAgent,
)

@@ -104,7 +104,7 @@ fn generate_types_content() -> String {
server::routes::config::CheckEditorAvailabilityResponse::decl(),
server::routes::config::CheckAgentAvailabilityQuery::decl(),
server::routes::oauth::CurrentUserResponse::decl(),
server::routes::task_attempts::CreateFollowUpAttempt::decl(),
server::routes::sessions::CreateFollowUpAttempt::decl(),
server::routes::task_attempts::ChangeTargetBranchRequest::decl(),
server::routes::task_attempts::ChangeTargetBranchResponse::decl(),
server::routes::task_attempts::MergeTaskAttemptRequest::decl(),

@@ -5,7 +5,7 @@ use axum::{
response::Response,
};
use db::models::{
execution_process::ExecutionProcess, project::Project, tag::Tag, task::Task,
execution_process::ExecutionProcess, project::Project, session::Session, tag::Tag, task::Task,
workspace::Workspace,
};
use deployment::Deployment;
@@ -147,3 +147,25 @@ pub async fn load_tag_middleware(
// Continue with the next middleware/handler
Ok(next.run(request).await)
}

pub async fn load_session_middleware(
State(deployment): State<DeploymentImpl>,
Path(session_id): Path<Uuid>,
mut request: Request,
next: Next,
) -> Result<Response, StatusCode> {
let session = match Session::find_by_id(&deployment.db().pool, session_id).await {
Ok(Some(session)) => session,
Ok(None) => {
tracing::warn!("Session {} not found", session_id);
return Err(StatusCode::NOT_FOUND);
}
Err(e) => {
tracing::error!("Failed to fetch session {}: {}", session_id, e);
return Err(StatusCode::INTERNAL_SERVER_ERROR);
}
};

request.extensions_mut().insert(session);
Ok(next.run(request).await)
}

@@ -45,7 +45,7 @@ pub fn router(deployment: DeploymentImpl) -> IntoMakeService<Router> {
.merge(events::router(&deployment))
.merge(approvals::router())
.merge(scratch::router(&deployment))
.merge(sessions::router())
.merge(sessions::router(&deployment))
.nest("/images", images::routes())
.with_state(deployment);

@@ -1,31 +0,0 @@
use axum::{
Router,
extract::{Query, State},
response::Json as ResponseJson,
routing::get,
};
use db::models::session::Session;
use deployment::Deployment;
use serde::Deserialize;
use utils::response::ApiResponse;
use uuid::Uuid;

use crate::{DeploymentImpl, error::ApiError};

#[derive(Debug, Deserialize)]
pub struct SessionQuery {
pub workspace_id: Uuid,
}

pub async fn get_sessions(
State(deployment): State<DeploymentImpl>,
Query(query): Query<SessionQuery>,
) -> Result<ResponseJson<ApiResponse<Vec<Session>>>, ApiError> {
let pool = &deployment.db().pool;
let sessions = Session::find_by_workspace_id(pool, query.workspace_id).await?;
Ok(ResponseJson(ApiResponse::success(sessions)))
}

pub fn router() -> Router<DeploymentImpl> {
Router::new().route("/sessions", get(get_sessions))
}
crates/server/src/routes/sessions/mod.rs (new file, 240 lines)
@@ -0,0 +1,240 @@
|
||||
pub mod queue;
|
||||
|
||||
use axum::{
|
||||
Extension, Json, Router,
|
||||
extract::{Query, State},
|
||||
middleware::from_fn_with_state,
|
||||
response::Json as ResponseJson,
|
||||
routing::{get, post},
|
||||
};
|
||||
use db::models::{
|
||||
execution_process::{ExecutionProcess, ExecutionProcessRunReason},
|
||||
project_repo::ProjectRepo,
|
||||
scratch::{Scratch, ScratchType},
|
||||
session::{CreateSession, Session},
|
||||
workspace::{Workspace, WorkspaceError},
|
||||
};
|
||||
use deployment::Deployment;
|
||||
use executors::{
|
||||
actions::{
|
||||
ExecutorAction, ExecutorActionType, coding_agent_follow_up::CodingAgentFollowUpRequest,
|
||||
},
|
||||
profile::ExecutorProfileId,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use services::services::container::ContainerService;
|
||||
use sqlx::Error as SqlxError;
|
||||
use ts_rs::TS;
|
||||
use utils::response::ApiResponse;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
DeploymentImpl, error::ApiError, middleware::load_session_middleware,
|
||||
routes::task_attempts::util::restore_worktrees_to_process,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct SessionQuery {
|
||||
pub workspace_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, TS)]
|
||||
pub struct CreateSessionRequest {
|
||||
pub workspace_id: Uuid,
|
||||
pub executor: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn get_sessions(
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
Query(query): Query<SessionQuery>,
|
||||
) -> Result<ResponseJson<ApiResponse<Vec<Session>>>, ApiError> {
|
||||
let pool = &deployment.db().pool;
|
||||
let sessions = Session::find_by_workspace_id(pool, query.workspace_id).await?;
|
||||
Ok(ResponseJson(ApiResponse::success(sessions)))
|
||||
}
|
||||
|
||||
pub async fn get_session(
|
||||
Extension(session): Extension<Session>,
|
||||
) -> Result<ResponseJson<ApiResponse<Session>>, ApiError> {
|
||||
Ok(ResponseJson(ApiResponse::success(session)))
|
||||
}
|
||||
|
||||
pub async fn create_session(
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
Json(payload): Json<CreateSessionRequest>,
|
||||
) -> Result<ResponseJson<ApiResponse<Session>>, ApiError> {
|
||||
let pool = &deployment.db().pool;
|
||||
|
||||
// Verify workspace exists
|
||||
let _workspace = Workspace::find_by_id(pool, payload.workspace_id)
|
||||
.await?
|
||||
.ok_or(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Workspace not found".to_string(),
|
||||
)))?;
|
||||
|
||||
let session = Session::create(
|
||||
pool,
|
||||
&CreateSession {
|
||||
executor: payload.executor,
|
||||
},
|
||||
Uuid::new_v4(),
|
||||
payload.workspace_id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(ResponseJson(ApiResponse::success(session)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, TS)]
|
||||
pub struct CreateFollowUpAttempt {
|
||||
pub prompt: String,
|
||||
pub variant: Option<String>,
|
||||
pub retry_process_id: Option<Uuid>,
|
||||
pub force_when_dirty: Option<bool>,
|
||||
pub perform_git_reset: Option<bool>,
|
||||
}
|
||||
|
||||
pub async fn follow_up(
|
||||
Extension(session): Extension<Session>,
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
Json(payload): Json<CreateFollowUpAttempt>,
|
||||
) -> Result<ResponseJson<ApiResponse<ExecutionProcess>>, ApiError> {
|
||||
let pool = &deployment.db().pool;
|
||||
|
||||
// Load workspace from session
|
||||
let workspace = Workspace::find_by_id(pool, session.workspace_id)
|
||||
.await?
|
||||
.ok_or(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Workspace not found".to_string(),
|
||||
)))?;
|
||||
|
||||
tracing::info!("{:?}", workspace);
|
||||
|
||||
deployment
|
||||
.container()
|
||||
.ensure_container_exists(&workspace)
|
||||
.await?;
|
||||
|
||||
// Get executor profile data from the latest CodingAgent process in this session
|
||||
let initial_executor_profile_id =
|
||||
ExecutionProcess::latest_executor_profile_for_session(pool, session.id).await?;
|
||||
|
||||
let executor_profile_id = ExecutorProfileId {
|
||||
executor: initial_executor_profile_id.executor,
|
||||
variant: payload.variant,
|
||||
};
|
||||
|
||||
// Get parent task
|
||||
let task = workspace
|
||||
.parent_task(pool)
|
||||
.await?
|
||||
.ok_or(SqlxError::RowNotFound)?;
|
||||
|
||||
// Get parent project
|
||||
let project = task
|
||||
.parent_project(pool)
|
||||
.await?
|
||||
.ok_or(SqlxError::RowNotFound)?;
|
||||
|
||||
// If retry settings provided, perform replace-logic before proceeding
|
||||
if let Some(proc_id) = payload.retry_process_id {
|
||||
// Validate process belongs to this session
|
||||
let process =
|
||||
ExecutionProcess::find_by_id(pool, proc_id)
|
||||
.await?
|
||||
.ok_or(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Process not found".to_string(),
|
||||
)))?;
|
||||
if process.session_id != session.id {
|
||||
return Err(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Process does not belong to this session".to_string(),
|
||||
)));
|
||||
}
|
||||
|
||||
// Reset all repository worktrees to the state before the target process
|
||||
let force_when_dirty = payload.force_when_dirty.unwrap_or(false);
|
||||
let perform_git_reset = payload.perform_git_reset.unwrap_or(true);
|
||||
restore_worktrees_to_process(
|
||||
&deployment,
|
||||
pool,
|
||||
&workspace,
|
||||
proc_id,
|
||||
perform_git_reset,
|
||||
force_when_dirty,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Stop any running processes for this workspace (except dev server)
|
||||
deployment.container().try_stop(&workspace, false).await;
|
||||
|
||||
// Soft-drop the target process and all later processes in that session
|
||||
let _ = ExecutionProcess::drop_at_and_after(pool, process.session_id, proc_id).await?;
|
||||
}
|
||||
|
||||
let latest_agent_session_id =
|
||||
ExecutionProcess::find_latest_coding_agent_turn_session_id(pool, session.id).await?;
|
||||
|
||||
let prompt = payload.prompt;
|
||||
|
||||
let project_repos = ProjectRepo::find_by_project_id_with_names(pool, project.id).await?;
|
||||
let cleanup_action = deployment
|
||||
.container()
|
||||
.cleanup_actions_for_repos(&project_repos);
|
||||
|
||||
let action_type = if let Some(agent_session_id) = latest_agent_session_id {
|
||||
ExecutorActionType::CodingAgentFollowUpRequest(CodingAgentFollowUpRequest {
|
||||
prompt: prompt.clone(),
|
||||
session_id: agent_session_id,
|
||||
executor_profile_id: executor_profile_id.clone(),
|
||||
})
|
||||
} else {
|
||||
ExecutorActionType::CodingAgentInitialRequest(
|
||||
executors::actions::coding_agent_initial::CodingAgentInitialRequest {
|
||||
prompt,
|
||||
executor_profile_id: executor_profile_id.clone(),
|
||||
},
|
||||
)
|
||||
};
|
||||
|
||||
let action = ExecutorAction::new(action_type, cleanup_action.map(Box::new));
|
||||
|
||||
let execution_process = deployment
|
||||
.container()
|
||||
.start_execution(
|
||||
&workspace,
|
||||
&session,
|
||||
&action,
|
||||
&ExecutionProcessRunReason::CodingAgent,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Clear the draft follow-up scratch on successful spawn
|
||||
// This ensures the scratch is wiped even if the user navigates away quickly
|
||||
if let Err(e) = Scratch::delete(pool, workspace.id, &ScratchType::DraftFollowUp).await {
|
||||
// Log but don't fail the request - scratch deletion is best-effort
|
||||
tracing::debug!(
|
||||
"Failed to delete draft follow-up scratch for attempt {}: {}",
|
||||
workspace.id,
|
||||
e
|
||||
);
|
||||
}
|
||||
|
||||
Ok(ResponseJson(ApiResponse::success(execution_process)))
|
||||
}
|
||||
|
||||
pub fn router(deployment: &DeploymentImpl) -> Router<DeploymentImpl> {
|
||||
let session_id_router = Router::new()
|
||||
.route("/", get(get_session))
|
||||
.route("/follow-up", post(follow_up))
|
||||
.layer(from_fn_with_state(
|
||||
deployment.clone(),
|
||||
load_session_middleware,
|
||||
));
|
||||
|
||||
let sessions_router = Router::new()
|
||||
.route("/", get(get_sessions).post(create_session))
|
||||
.nest("/{session_id}", session_id_router)
|
||||
.nest("/{session_id}/queue", queue::router(deployment));
|
||||
|
||||
Router::new().nest("/sessions", sessions_router)
|
||||
}
|
||||
@@ -2,14 +2,14 @@ use axum::{
|
||||
Extension, Json, Router, extract::State, middleware::from_fn_with_state,
|
||||
response::Json as ResponseJson, routing::get,
|
||||
};
|
||||
use db::models::{scratch::DraftFollowUpData, workspace::Workspace};
|
||||
use db::models::{scratch::DraftFollowUpData, session::Session};
|
||||
use deployment::Deployment;
|
||||
use serde::Deserialize;
|
||||
use services::services::queued_message::QueueStatus;
|
||||
use ts_rs::TS;
|
||||
use utils::response::ApiResponse;
|
||||
|
||||
use crate::{DeploymentImpl, error::ApiError, middleware::load_workspace_middleware};
|
||||
use crate::{DeploymentImpl, error::ApiError, middleware::load_session_middleware};
|
||||
|
||||
/// Request body for queueing a follow-up message
|
||||
#[derive(Debug, Deserialize, TS)]
|
||||
@@ -20,7 +20,7 @@ pub struct QueueMessageRequest {
|
||||
|
||||
/// Queue a follow-up message to be executed when the current execution finishes
|
||||
pub async fn queue_message(
|
||||
Extension(workspace): Extension<Workspace>,
|
||||
Extension(session): Extension<Session>,
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
Json(payload): Json<QueueMessageRequest>,
|
||||
) -> Result<ResponseJson<ApiResponse<QueueStatus>>, ApiError> {
|
||||
@@ -31,13 +31,14 @@ pub async fn queue_message(
|
||||
|
||||
let queued = deployment
|
||||
.queued_message_service()
|
||||
.queue_message(workspace.id, data);
|
||||
.queue_message(session.workspace_id, data);
|
||||
|
||||
deployment
|
||||
.track_if_analytics_allowed(
|
||||
"follow_up_queued",
|
||||
serde_json::json!({
|
||||
"workspace_id": workspace.id.to_string(),
|
||||
"session_id": session.id.to_string(),
|
||||
"workspace_id": session.workspace_id.to_string(),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
@@ -49,18 +50,19 @@ pub async fn queue_message(
|
||||
|
||||
/// Cancel a queued follow-up message
|
||||
pub async fn cancel_queued_message(
|
||||
Extension(workspace): Extension<Workspace>,
|
||||
Extension(session): Extension<Session>,
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
) -> Result<ResponseJson<ApiResponse<QueueStatus>>, ApiError> {
|
||||
deployment
|
||||
.queued_message_service()
|
||||
.cancel_queued(workspace.id);
|
||||
.cancel_queued(session.workspace_id);
|
||||
|
||||
deployment
|
||||
.track_if_analytics_allowed(
|
||||
"follow_up_queue_cancelled",
|
||||
serde_json::json!({
|
||||
"workspace_id": workspace.id.to_string(),
|
||||
"session_id": session.id.to_string(),
|
||||
"workspace_id": session.workspace_id.to_string(),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
@@ -68,12 +70,14 @@ pub async fn cancel_queued_message(
|
||||
Ok(ResponseJson(ApiResponse::success(QueueStatus::Empty)))
|
||||
}
|
||||
|
||||
/// Get the current queue status for a workspace
|
||||
/// Get the current queue status for a session's workspace
|
||||
pub async fn get_queue_status(
|
||||
Extension(workspace): Extension<Workspace>,
|
||||
Extension(session): Extension<Session>,
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
) -> Result<ResponseJson<ApiResponse<QueueStatus>>, ApiError> {
|
||||
let status = deployment.queued_message_service().get_status(workspace.id);
|
||||
let status = deployment
|
||||
.queued_message_service()
|
||||
.get_status(session.workspace_id);
|
||||
|
||||
Ok(ResponseJson(ApiResponse::success(status)))
|
||||
}
|
||||
@@ -88,6 +92,6 @@ pub fn router(deployment: &DeploymentImpl) -> Router<DeploymentImpl> {
|
||||
)
|
||||
.layer(from_fn_with_state(
|
||||
deployment.clone(),
|
||||
load_workspace_middleware,
|
||||
load_session_middleware,
|
||||
))
|
||||
}
|
||||
@@ -3,7 +3,6 @@ pub mod cursor_setup;
|
||||
pub mod gh_cli_setup;
|
||||
pub mod images;
|
||||
pub mod pr;
|
||||
pub mod queue;
|
||||
pub mod util;
|
||||
|
||||
use std::{
|
||||
@@ -27,7 +26,6 @@ use db::models::{
|
||||
merge::{Merge, MergeStatus, PrMerge, PullRequestInfo},
|
||||
project_repo::ProjectRepo,
|
||||
repo::{Repo, RepoError},
|
||||
scratch::{Scratch, ScratchType},
|
||||
session::{CreateSession, Session},
|
||||
task::{Task, TaskRelationships, TaskStatus},
|
||||
workspace::{CreateWorkspace, Workspace, WorkspaceError},
|
||||
@@ -37,7 +35,6 @@ use deployment::Deployment;
|
||||
use executors::{
|
||||
actions::{
|
||||
ExecutorAction, ExecutorActionType,
|
||||
coding_agent_follow_up::CodingAgentFollowUpRequest,
|
||||
script::{ScriptContext, ScriptRequest, ScriptRequestLanguage},
|
||||
},
|
||||
executors::{CodingAgent, ExecutorError},
|
||||
@@ -56,10 +53,8 @@ use utils::response::ApiResponse;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
DeploymentImpl,
|
||||
error::ApiError,
|
||||
middleware::load_workspace_middleware,
|
||||
routes::task_attempts::{gh_cli_setup::GhCliSetupError, util::restore_worktrees_to_process},
|
||||
DeploymentImpl, error::ApiError, middleware::load_workspace_middleware,
|
||||
routes::task_attempts::gh_cli_setup::GhCliSetupError,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize, TS)]
|
||||
@@ -231,158 +226,6 @@ pub async fn run_agent_setup(
|
||||
Ok(ResponseJson(ApiResponse::success(RunAgentSetupResponse {})))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, TS)]
|
||||
pub struct CreateFollowUpAttempt {
|
||||
pub prompt: String,
|
||||
pub variant: Option<String>,
|
||||
pub retry_process_id: Option<Uuid>,
|
||||
pub force_when_dirty: Option<bool>,
|
||||
pub perform_git_reset: Option<bool>,
|
||||
}
|
||||
|
||||
pub async fn follow_up(
|
||||
Extension(workspace): Extension<Workspace>,
|
||||
State(deployment): State<DeploymentImpl>,
|
||||
Json(payload): Json<CreateFollowUpAttempt>,
|
||||
) -> Result<ResponseJson<ApiResponse<ExecutionProcess>>, ApiError> {
|
||||
tracing::info!("{:?}", workspace);
|
||||
|
||||
let pool = &deployment.db().pool;
|
||||
|
||||
deployment
|
||||
.container()
|
||||
.ensure_container_exists(&workspace)
|
||||
.await?;
|
||||
|
||||
// Get executor profile data from the latest CodingAgent process
|
||||
let initial_executor_profile_id =
|
||||
ExecutionProcess::latest_executor_profile_for_workspace(pool, workspace.id).await?;
|
||||
|
||||
let executor_profile_id = ExecutorProfileId {
|
||||
executor: initial_executor_profile_id.executor,
|
||||
variant: payload.variant,
|
||||
};
|
||||
|
||||
// Get parent task
|
||||
let task = workspace
|
||||
.parent_task(pool)
|
||||
.await?
|
||||
.ok_or(SqlxError::RowNotFound)?;
|
||||
|
||||
// Get parent project
|
||||
let project = task
|
||||
.parent_project(pool)
|
||||
.await?
|
||||
.ok_or(SqlxError::RowNotFound)?;
|
||||
|
||||
// If retry settings provided, perform replace-logic before proceeding
|
||||
if let Some(proc_id) = payload.retry_process_id {
|
||||
// Validate process belongs to this workspace (via session)
|
||||
let process =
|
||||
ExecutionProcess::find_by_id(pool, proc_id)
|
||||
.await?
|
||||
.ok_or(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Process not found".to_string(),
|
||||
)))?;
|
||||
let process_session =
|
||||
Session::find_by_id(pool, process.session_id)
|
||||
.await?
|
||||
.ok_or(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Session not found".to_string(),
|
||||
)))?;
|
||||
if process_session.workspace_id != workspace.id {
|
||||
return Err(ApiError::Workspace(WorkspaceError::ValidationError(
|
||||
"Process does not belong to this workspace".to_string(),
|
||||
)));
|
||||
}
|
||||
|
||||
// Reset all repository worktrees to the state before the target process
|
||||
let force_when_dirty = payload.force_when_dirty.unwrap_or(false);
|
||||
let perform_git_reset = payload.perform_git_reset.unwrap_or(true);
|
||||
restore_worktrees_to_process(
|
||||
&deployment,
|
||||
pool,
|
||||
&workspace,
|
||||
proc_id,
|
||||
perform_git_reset,
|
||||
force_when_dirty,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Stop any running processes for this workspace (except dev server)
|
||||
deployment.container().try_stop(&workspace, false).await;
|
||||
|
||||
// Soft-drop the target process and all later processes in that session
|
||||
let _ = ExecutionProcess::drop_at_and_after(pool, process.session_id, proc_id).await?;
|
||||
}
|
||||
|
||||
// Get or create a session for the follow-up
|
||||
let session = match Session::find_latest_by_workspace_id(pool, workspace.id).await? {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
Session::create(
|
||||
pool,
|
||||
&CreateSession {
|
||||
executor: Some(executor_profile_id.executor.to_string()),
|
||||
},
|
||||
Uuid::new_v4(),
|
||||
workspace.id,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
let latest_agent_session_id =
|
||||
ExecutionProcess::find_latest_agent_session_id_by_workspace(pool, workspace.id).await?;
|
||||
|
||||
let prompt = payload.prompt;
|
||||
|
||||
let project_repos = ProjectRepo::find_by_project_id_with_names(pool, project.id).await?;
|
||||
let cleanup_action = deployment
|
||||
.container()
|
||||
.cleanup_actions_for_repos(&project_repos);
|
||||
|
||||
let action_type = if let Some(agent_session_id) = latest_agent_session_id {
|
||||
ExecutorActionType::CodingAgentFollowUpRequest(CodingAgentFollowUpRequest {
|
||||
prompt: prompt.clone(),
|
||||
session_id: agent_session_id,
|
||||
executor_profile_id: executor_profile_id.clone(),
|
||||
})
|
||||
} else {
|
||||
ExecutorActionType::CodingAgentInitialRequest(
|
||||
executors::actions::coding_agent_initial::CodingAgentInitialRequest {
|
||||
prompt,
|
||||
executor_profile_id: executor_profile_id.clone(),
|
||||
},
|
||||
)
|
||||
};
|
||||
|
||||
let action = ExecutorAction::new(action_type, cleanup_action.map(Box::new));
|
||||
|
||||
let execution_process = deployment
|
||||
.container()
|
||||
.start_execution(
|
||||
&workspace,
|
||||
&session,
|
||||
&action,
|
||||
&ExecutionProcessRunReason::CodingAgent,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Clear the draft follow-up scratch on successful spawn
|
||||
// This ensures the scratch is wiped even if the user navigates away quickly
|
||||
if let Err(e) = Scratch::delete(pool, workspace.id, &ScratchType::DraftFollowUp).await {
|
||||
// Log but don't fail the request - scratch deletion is best-effort
|
||||
tracing::debug!(
|
||||
"Failed to delete draft follow-up scratch for attempt {}: {}",
|
||||
workspace.id,
|
||||
e
|
||||
);
|
||||
}
|
||||
|
||||
Ok(ResponseJson(ApiResponse::success(execution_process)))
|
||||
}
|
||||
|
||||
#[axum::debug_handler]
|
||||
pub async fn stream_task_attempt_diff_ws(
|
||||
ws: WebSocketUpgrade,
|
||||
@@ -1625,7 +1468,6 @@ pub async fn get_task_attempt_repos(
|
||||
pub fn router(deployment: &DeploymentImpl) -> Router<DeploymentImpl> {
|
||||
let task_attempt_id_router = Router::new()
|
||||
.route("/", get(get_task_attempt))
|
||||
.route("/follow-up", post(follow_up))
|
||||
.route("/run-agent-setup", post(run_agent_setup))
|
||||
.route("/gh-cli-setup", post(gh_cli_setup_handler))
|
||||
.route("/start-dev-server", post(start_dev_server))
|
||||
@@ -1655,8 +1497,7 @@ pub fn router(deployment: &DeploymentImpl) -> Router<DeploymentImpl> {
|
||||
let task_attempts_router = Router::new()
|
||||
.route("/", get(get_task_attempts).post(create_task_attempt))
|
||||
.nest("/{id}", task_attempt_id_router)
|
||||
.nest("/{id}/images", images::router(deployment))
|
||||
.nest("/{id}/queue", queue::router(deployment));
|
||||
.nest("/{id}/images", images::router(deployment));
|
||||
|
||||
Router::new().nest("/task-attempts", task_attempts_router)
|
||||
}
|
||||
|
||||
@@ -119,17 +119,30 @@ async fn trigger_pr_description_follow_up(
|
||||
|
||||
drop(config); // Release the lock before async operations
|
||||
|
||||
// Get executor profile from the latest coding agent process
|
||||
let executor_profile_id = ExecutionProcess::latest_executor_profile_for_workspace(
|
||||
// Get or create a session for this follow-up
|
||||
let session =
|
||||
match Session::find_latest_by_workspace_id(&deployment.db().pool, workspace.id).await? {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
Session::create(
|
||||
&deployment.db().pool,
|
||||
&CreateSession { executor: None },
|
||||
Uuid::new_v4(),
|
||||
workspace.id,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
// Get executor profile from the latest coding agent process in this session
|
||||
let executor_profile_id =
|
||||
ExecutionProcess::latest_executor_profile_for_session(&deployment.db().pool, session.id)
|
||||
.await?;
|
||||
|
||||
// Get latest agent session ID if one exists (for coding agent continuity)
|
||||
let latest_agent_session_id = ExecutionProcess::find_latest_agent_session_id_by_workspace(
|
||||
let latest_agent_session_id = ExecutionProcess::find_latest_coding_agent_turn_session_id(
|
||||
&deployment.db().pool,
|
||||
workspace.id,
|
||||
session.id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -149,23 +162,6 @@ async fn trigger_pr_description_follow_up(
|
||||
|
||||
let action = ExecutorAction::new(action_type, None);
|
||||
|
||||
// Get or create a session for this follow-up
|
||||
let session =
|
||||
match Session::find_latest_by_workspace_id(&deployment.db().pool, workspace.id).await? {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
Session::create(
|
||||
&deployment.db().pool,
|
||||
&CreateSession {
|
||||
executor: Some(executor_profile_id.to_string()),
|
||||
},
|
||||
Uuid::new_v4(),
|
||||
workspace.id,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
deployment
|
||||
.container()
|
||||
.start_execution(
|
||||
|
||||
@@ -126,11 +126,11 @@ export function NextActionCard({
|
||||
}, [attemptId, task]);
|
||||
|
||||
const handleRunSetup = useCallback(async () => {
|
||||
if (!attemptId || !attempt) return;
|
||||
if (!attemptId || !attempt?.session?.executor) return;
|
||||
try {
|
||||
await attemptsApi.runAgentSetup(attemptId, {
|
||||
executor_profile_id: {
|
||||
executor: attempt.executor as BaseCodingAgent,
|
||||
executor: attempt.session.executor as BaseCodingAgent,
|
||||
variant: null,
|
||||
},
|
||||
});
|
||||
@@ -140,12 +140,14 @@ export function NextActionCard({
|
||||
}, [attemptId, attempt]);
|
||||
|
||||
const canAutoSetup = !!(
|
||||
attempt?.executor &&
|
||||
capabilities?.[attempt.executor]?.includes(BaseAgentCapability.SETUP_HELPER)
|
||||
attempt?.session?.executor &&
|
||||
capabilities?.[attempt.session.executor]?.includes(
|
||||
BaseAgentCapability.SETUP_HELPER
|
||||
)
|
||||
);
|
||||
|
||||
const setupHelpText = canAutoSetup
|
||||
? t('attempt.setupHelpText', { agent: attempt?.executor })
|
||||
? t('attempt.setupHelpText', { agent: attempt?.session?.executor })
|
||||
: null;
|
||||
|
||||
const editorName = getIdeName(config?.editor?.editor_type);
|
||||
|
||||
@@ -37,6 +37,9 @@ export function RetryEditorInline({
|
||||
const [message, setMessage] = useState(initialContent);
|
||||
const [sendError, setSendError] = useState<string | null>(null);
|
||||
|
||||
// Get sessionId from attempt's session
|
||||
const sessionId = attempt.session?.id;
|
||||
|
||||
// Extract variant from the process being retried
|
||||
const processVariant = useMemo<string | null>(() => {
|
||||
const process = attemptData.processes?.find(
|
||||
@@ -71,13 +74,13 @@ export function RetryEditorInline({
|
||||
});
|
||||
|
||||
const retryMutation = useRetryProcess(
|
||||
attemptId,
|
||||
sessionId ?? '',
|
||||
() => onCancelled?.(),
|
||||
(err) => setSendError((err as Error)?.message || 'Failed to send retry')
|
||||
);
|
||||
|
||||
const isSending = retryMutation.isPending;
|
||||
const canSend = !isAttemptRunning && !!message.trim();
|
||||
const canSend = !isAttemptRunning && !!message.trim() && !!sessionId;
|
||||
|
||||
const onCancel = () => {
|
||||
onCancelled?.();
|
||||
@@ -170,7 +173,7 @@ export function RetryEditorInline({
|
||||
<VariantSelector
|
||||
selectedVariant={selectedVariant}
|
||||
onChange={setSelectedVariant}
|
||||
currentProfile={profiles?.[attempt.executor] ?? null}
|
||||
currentProfile={profiles?.[attempt.session?.executor ?? ''] ?? null}
|
||||
/>
|
||||
<input
|
||||
ref={fileInputRef}
|
||||
|
||||
@@ -23,8 +23,8 @@ const UserMessage = ({
|
||||
const { isAttemptRunning } = useAttemptExecution(taskAttempt?.id);
|
||||
|
||||
const canFork = !!(
|
||||
taskAttempt?.executor &&
|
||||
capabilities?.[taskAttempt.executor]?.includes(
|
||||
taskAttempt?.session?.executor &&
|
||||
capabilities?.[taskAttempt.session.executor]?.includes(
|
||||
BaseAgentCapability.SESSION_FORK
|
||||
)
|
||||
);
|
||||
|
||||
@@ -99,8 +99,8 @@ const CreateAttemptDialogImpl = NiceModal.create<CreateAttemptDialogProps>(
|
||||
}, [modal.visible, resetBranchSelection]);
|
||||
|
||||
const defaultProfile: ExecutorProfileId | null = useMemo(() => {
|
||||
if (latestAttempt?.executor) {
|
||||
const lastExec = latestAttempt.executor as BaseCodingAgent;
|
||||
if (latestAttempt?.session?.executor) {
|
||||
const lastExec = latestAttempt.session.executor as BaseCodingAgent;
|
||||
// If the last attempt used the same executor as the user's current preference,
|
||||
// we assume they want to use their preferred variant as well.
|
||||
// Otherwise, we default to the "default" variant (null) since we don't know
|
||||
@@ -116,7 +116,7 @@ const CreateAttemptDialogImpl = NiceModal.create<CreateAttemptDialogProps>(
|
||||
};
|
||||
}
|
||||
return config?.executor_profile ?? null;
|
||||
}, [latestAttempt?.executor, config?.executor_profile]);
|
||||
}, [latestAttempt?.session?.executor, config?.executor_profile]);
|
||||
|
||||
const effectiveProfile = userSelectedProfile ?? defaultProfile;
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ const TaskAttemptPanel = ({
|
||||
<VirtualizedList key={attempt.id} attempt={attempt} task={task} />
|
||||
),
|
||||
followUp: (
|
||||
<TaskFollowUpSection task={task} selectedAttemptId={attempt.id} />
|
||||
<TaskFollowUpSection task={task} session={attempt.session} />
|
||||
),
|
||||
})}
|
||||
</RetryUiProvider>
|
||||
|
||||
@@ -80,7 +80,7 @@ const TaskPanel = ({ task }: TaskPanelProps) => {
|
||||
{
|
||||
id: 'executor',
|
||||
header: '',
|
||||
accessor: (attempt) => attempt.executor || 'Base Agent',
|
||||
accessor: (attempt) => attempt.session?.executor || 'Base Agent',
|
||||
className: 'pr-4',
|
||||
},
|
||||
{
|
||||
|
||||
@@ -60,24 +60,30 @@ import { useQueueStatus } from '@/hooks/useQueueStatus';
|
||||
import { imagesApi, attemptsApi } from '@/lib/api';
|
||||
import { GitHubCommentsDialog } from '@/components/dialogs/tasks/GitHubCommentsDialog';
|
||||
import type { NormalizedComment } from '@/components/ui/wysiwyg/nodes/github-comment-node';
|
||||
import type { Session } from 'shared/types';
|
||||
|
||||
interface TaskFollowUpSectionProps {
|
||||
task: TaskWithAttemptStatus;
|
||||
selectedAttemptId?: string;
|
||||
session?: Session;
|
||||
}
|
||||
|
||||
export function TaskFollowUpSection({
|
||||
task,
|
||||
selectedAttemptId,
|
||||
session,
|
||||
}: TaskFollowUpSectionProps) {
|
||||
const { t } = useTranslation('tasks');
|
||||
const { projectId } = useProject();
|
||||
|
||||
// Derive IDs from session
|
||||
const workspaceId = session?.workspace_id;
|
||||
const sessionId = session?.id;
|
||||
|
||||
const { isAttemptRunning, stopExecution, isStopping, processes } =
|
||||
useAttemptExecution(selectedAttemptId, task.id);
|
||||
useAttemptExecution(workspaceId, task.id);
|
||||
|
||||
const { data: branchStatus, refetch: refetchBranchStatus } =
|
||||
useBranchStatus(selectedAttemptId);
|
||||
const { repos, selectedRepoId } = useAttemptRepo(selectedAttemptId);
|
||||
useBranchStatus(workspaceId);
|
||||
const { repos, selectedRepoId } = useAttemptRepo(workspaceId);
|
||||
|
||||
const getSelectedRepoId = useCallback(() => {
|
||||
return selectedRepoId ?? repos[0]?.id;
|
||||
@@ -91,7 +97,7 @@ export function TaskFollowUpSection({
|
||||
[branchStatus]
|
||||
);
|
||||
const { branch: attemptBranch, refetch: refetchAttemptBranch } =
|
||||
useAttemptBranch(selectedAttemptId);
|
||||
useAttemptBranch(workspaceId);
|
||||
const { profiles } = useUserSystem();
|
||||
const { comments, generateReviewMarkdown, clearComments } = useReview();
|
||||
const {
|
||||
@@ -127,7 +133,7 @@ export function TaskFollowUpSection({
|
||||
scratch,
|
||||
updateScratch,
|
||||
isLoading: isScratchLoading,
|
||||
} = useScratch(ScratchType.DRAFT_FOLLOW_UP, selectedAttemptId ?? '');
|
||||
} = useScratch(ScratchType.DRAFT_FOLLOW_UP, workspaceId ?? '');
|
||||
|
||||
// Derive the message and variant from scratch
|
||||
const scratchData: DraftFollowUpData | undefined =
|
||||
@@ -201,7 +207,7 @@ export function TaskFollowUpSection({
|
||||
// Uses scratchRef to avoid callback invalidation when scratch updates
|
||||
const saveToScratch = useCallback(
|
||||
async (message: string, variant: string | null) => {
|
||||
if (!selectedAttemptId) return;
|
||||
if (!workspaceId) return;
|
||||
// Don't create empty scratch entries - only save if there's actual content,
|
||||
// a variant is selected, or scratch already exists (to allow clearing a draft)
|
||||
if (!message.trim() && !variant && !scratchRef.current) return;
|
||||
@@ -216,7 +222,7 @@ export function TaskFollowUpSection({
|
||||
console.error('Failed to save follow-up draft', e);
|
||||
}
|
||||
},
|
||||
[selectedAttemptId, updateScratch]
|
||||
[workspaceId, updateScratch]
|
||||
);
|
||||
|
||||
// Wrapper to update variant and save to scratch immediately
|
||||
@@ -259,7 +265,7 @@ export function TaskFollowUpSection({
|
||||
queueMessage,
|
||||
cancelQueue,
|
||||
refresh: refreshQueueStatus,
|
||||
} = useQueueStatus(selectedAttemptId);
|
||||
} = useQueueStatus(sessionId);
|
||||
|
||||
// Track previous process count to detect new processes
|
||||
const prevProcessCountRef = useRef(processes.length);
|
||||
@@ -269,7 +275,7 @@ export function TaskFollowUpSection({
|
||||
const prevCount = prevProcessCountRef.current;
|
||||
prevProcessCountRef.current = processes.length;
|
||||
|
||||
if (!selectedAttemptId) return;
|
||||
if (!workspaceId) return;
|
||||
|
||||
// Refresh when execution stops
|
||||
if (!isAttemptRunning) {
|
||||
@@ -286,7 +292,7 @@ export function TaskFollowUpSection({
|
||||
}
|
||||
}, [
|
||||
isAttemptRunning,
|
||||
selectedAttemptId,
|
||||
workspaceId,
|
||||
processes.length,
|
||||
refreshQueueStatus,
|
||||
scratchData?.message,
|
||||
@@ -312,7 +318,7 @@ export function TaskFollowUpSection({
|
||||
// Send follow-up action
|
||||
const { isSendingFollowUp, followUpError, setFollowUpError, onSendFollowUp } =
|
||||
useFollowUpSend({
|
||||
attemptId: selectedAttemptId,
|
||||
sessionId,
|
||||
message: localMessage,
|
||||
conflictMarkdown: conflictResolutionInstructions,
|
||||
reviewMarkdown,
|
||||
@@ -329,7 +335,7 @@ export function TaskFollowUpSection({
|
||||
|
||||
// Separate logic for when textarea should be disabled vs when send button should be disabled
|
||||
const canTypeFollowUp = useMemo(() => {
|
||||
if (!selectedAttemptId || processes.length === 0 || isSendingFollowUp) {
|
||||
if (!workspaceId || processes.length === 0 || isSendingFollowUp) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -338,7 +344,7 @@ export function TaskFollowUpSection({
|
||||
// Note: isQueued no longer blocks typing - editing auto-cancels the queue
|
||||
return true;
|
||||
}, [
|
||||
selectedAttemptId,
|
||||
workspaceId,
|
||||
processes.length,
|
||||
isSendingFollowUp,
|
||||
isRetryActive,
|
||||
@@ -369,22 +375,22 @@ export function TaskFollowUpSection({
|
||||
const hasAnyScript = true;
|
||||
|
||||
const handleRunSetupScript = useCallback(async () => {
|
||||
if (!selectedAttemptId || isAttemptRunning) return;
|
||||
if (!workspaceId || isAttemptRunning) return;
|
||||
try {
|
||||
await attemptsApi.runSetupScript(selectedAttemptId);
|
||||
await attemptsApi.runSetupScript(workspaceId);
|
||||
} catch (error) {
|
||||
console.error('Failed to run setup script:', error);
|
||||
}
|
||||
}, [selectedAttemptId, isAttemptRunning]);
|
||||
}, [workspaceId, isAttemptRunning]);
|
||||
|
||||
const handleRunCleanupScript = useCallback(async () => {
|
||||
if (!selectedAttemptId || isAttemptRunning) return;
|
||||
if (!workspaceId || isAttemptRunning) return;
|
||||
try {
|
||||
await attemptsApi.runCleanupScript(selectedAttemptId);
|
||||
await attemptsApi.runCleanupScript(workspaceId);
|
||||
} catch (error) {
|
||||
console.error('Failed to run cleanup script:', error);
|
||||
}
|
||||
}, [selectedAttemptId, isAttemptRunning]);
|
||||
}, [workspaceId, isAttemptRunning]);
|
||||
|
||||
// Handler to queue the current message for execution after agent finishes
|
||||
const handleQueueMessage = useCallback(async () => {
|
||||
@@ -469,14 +475,11 @@ export function TaskFollowUpSection({
|
||||
// Handle image paste - upload to container and insert markdown
|
||||
const handlePasteFiles = useCallback(
|
||||
async (files: File[]) => {
|
||||
if (!selectedAttemptId) return;
|
||||
if (!workspaceId) return;
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
const response = await imagesApi.uploadForAttempt(
|
||||
selectedAttemptId,
|
||||
file
|
||||
);
|
||||
const response = await imagesApi.uploadForAttempt(workspaceId, file);
|
||||
// Append markdown image to current message
|
||||
const imageMarkdown = ``;
|
||||
|
||||
@@ -503,7 +506,7 @@ export function TaskFollowUpSection({
|
||||
}
|
||||
}
|
||||
},
|
||||
[selectedAttemptId]
|
||||
[workspaceId]
|
||||
);
|
||||
|
||||
// Attachment button - file input ref and handlers
|
||||
@@ -527,12 +530,12 @@ export function TaskFollowUpSection({
|
||||
|
||||
// Handler for GitHub comments insertion
|
||||
const handleGitHubCommentClick = useCallback(async () => {
|
||||
if (!selectedAttemptId) return;
|
||||
if (!workspaceId) return;
|
||||
const repoId = getSelectedRepoId();
|
||||
if (!repoId) return;
|
||||
|
||||
const result = await GitHubCommentsDialog.show({
|
||||
attemptId: selectedAttemptId,
|
||||
attemptId: workspaceId,
|
||||
repoId,
|
||||
});
|
||||
if (result.comments.length > 0) {
|
||||
@@ -575,7 +578,7 @@ export function TaskFollowUpSection({
|
||||
});
|
||||
}
|
||||
}
|
||||
}, [selectedAttemptId, getSelectedRepoId]);
|
||||
}, [workspaceId, getSelectedRepoId]);
|
||||
|
||||
// Stable onChange handler for WYSIWYGEditor
|
||||
const handleEditorChange = useCallback(
|
||||
@@ -637,19 +640,19 @@ export function TaskFollowUpSection({
|
||||
// When a process completes (e.g., agent resolved conflicts), refresh branch status promptly
|
||||
const prevRunningRef = useRef<boolean>(isAttemptRunning);
|
||||
useEffect(() => {
|
||||
if (prevRunningRef.current && !isAttemptRunning && selectedAttemptId) {
|
||||
if (prevRunningRef.current && !isAttemptRunning && workspaceId) {
|
||||
refetchBranchStatus();
|
||||
refetchAttemptBranch();
|
||||
}
|
||||
prevRunningRef.current = isAttemptRunning;
|
||||
}, [
|
||||
isAttemptRunning,
|
||||
selectedAttemptId,
|
||||
workspaceId,
|
||||
refetchBranchStatus,
|
||||
refetchAttemptBranch,
|
||||
]);
|
||||
|
||||
if (!selectedAttemptId) return null;
|
||||
if (!workspaceId) return null;
|
||||
|
||||
if (isScratchLoading) {
|
||||
return (
|
||||
@@ -688,7 +691,7 @@ export function TaskFollowUpSection({
|
||||
{/* Conflict notice and actions (optional UI) */}
|
||||
{branchStatus && (
|
||||
<FollowUpConflictSection
|
||||
selectedAttemptId={selectedAttemptId}
|
||||
workspaceId={workspaceId}
|
||||
attemptBranch={attemptBranch}
|
||||
branchStatus={branchStatus}
|
||||
isEditable={isEditable}
|
||||
@@ -734,7 +737,7 @@ export function TaskFollowUpSection({
|
||||
disabled={!isEditable}
|
||||
onPasteFiles={handlePasteFiles}
|
||||
projectId={projectId}
|
||||
taskAttemptId={selectedAttemptId}
|
||||
taskAttemptId={workspaceId}
|
||||
onCmdEnter={handleSubmitShortcut}
|
||||
className="min-h-[40px]"
|
||||
/>
|
||||
|
||||
@@ -5,7 +5,7 @@ import { useAttemptConflicts } from '@/hooks/useAttemptConflicts';
|
||||
import type { RepoBranchStatus } from 'shared/types';
|
||||
|
||||
type Props = {
|
||||
selectedAttemptId?: string;
|
||||
workspaceId?: string;
|
||||
attemptBranch: string | null;
|
||||
branchStatus: RepoBranchStatus[] | undefined;
|
||||
isEditable: boolean;
|
||||
@@ -16,7 +16,7 @@ type Props = {
|
||||
};
|
||||
|
||||
export function FollowUpConflictSection({
|
||||
selectedAttemptId,
|
||||
workspaceId,
|
||||
attemptBranch,
|
||||
branchStatus,
|
||||
onResolve,
|
||||
@@ -28,9 +28,9 @@ export function FollowUpConflictSection({
|
||||
(r) => r.is_rebase_in_progress || (r.conflicted_files?.length ?? 0) > 0
|
||||
);
|
||||
const op = repoWithConflicts?.conflict_op ?? null;
|
||||
const openInEditor = useOpenInEditor(selectedAttemptId);
|
||||
const openInEditor = useOpenInEditor(workspaceId);
|
||||
const repoId = repoWithConflicts?.repo_id;
|
||||
const { abortConflicts } = useAttemptConflicts(selectedAttemptId, repoId);
|
||||
const { abortConflicts } = useAttemptConflicts(workspaceId, repoId);
|
||||
|
||||
// write using setAborting and read through abortingRef in async handlers
|
||||
const [aborting, setAborting] = useState(false);
|
||||
@@ -51,12 +51,12 @@ export function FollowUpConflictSection({
|
||||
onResolve={onResolve}
|
||||
enableResolve={enableResolve && !aborting}
|
||||
onOpenEditor={() => {
|
||||
if (!selectedAttemptId) return;
|
||||
if (!workspaceId) return;
|
||||
const first = repoWithConflicts.conflicted_files?.[0];
|
||||
openInEditor(first ? { filePath: first } : undefined);
|
||||
}}
|
||||
onAbort={async () => {
|
||||
if (!selectedAttemptId) return;
|
||||
if (!workspaceId) return;
|
||||
if (!enableAbort || abortingRef.current) return;
|
||||
try {
|
||||
setAborting(true);
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import { useCallback, useState } from 'react';
|
||||
import { attemptsApi } from '@/lib/api';
|
||||
import { sessionsApi } from '@/lib/api';
|
||||
import type { CreateFollowUpAttempt } from 'shared/types';
|
||||
|
||||
type Args = {
|
||||
attemptId?: string;
|
||||
sessionId?: string;
|
||||
message: string;
|
||||
conflictMarkdown: string | null;
|
||||
reviewMarkdown: string;
|
||||
@@ -15,7 +15,7 @@ type Args = {
|
||||
};
|
||||
|
||||
export function useFollowUpSend({
|
||||
attemptId,
|
||||
sessionId,
|
||||
message,
|
||||
conflictMarkdown,
|
||||
reviewMarkdown,
|
||||
@@ -29,7 +29,7 @@ export function useFollowUpSend({
|
||||
const [followUpError, setFollowUpError] = useState<string | null>(null);
|
||||
|
||||
const onSendFollowUp = useCallback(async () => {
|
||||
if (!attemptId) return;
|
||||
if (!sessionId) return;
|
||||
const extraMessage = message.trim();
|
||||
const finalPrompt = [
|
||||
conflictMarkdown,
|
||||
@@ -50,7 +50,7 @@ export function useFollowUpSend({
|
||||
force_when_dirty: null,
|
||||
perform_git_reset: null,
|
||||
};
|
||||
await attemptsApi.followUp(attemptId, body);
|
||||
await sessionsApi.followUp(sessionId, body);
|
||||
clearComments();
|
||||
clearClickedElements?.();
|
||||
onAfterSendCleanup();
|
||||
@@ -64,7 +64,7 @@ export function useFollowUpSend({
|
||||
setIsSendingFollowUp(false);
|
||||
}
|
||||
}, [
|
||||
attemptId,
|
||||
sessionId,
|
||||
message,
|
||||
conflictMarkdown,
|
||||
reviewMarkdown,
|
||||
|
||||
@@ -19,55 +19,55 @@ interface UseQueueStatusResult {
|
||||
refresh: () => Promise<void>;
|
||||
}
|
||||
|
||||
export function useQueueStatus(attemptId?: string): UseQueueStatusResult {
|
||||
export function useQueueStatus(sessionId?: string): UseQueueStatusResult {
|
||||
const [queueStatus, setQueueStatus] = useState<QueueStatus>({
|
||||
status: 'empty',
|
||||
});
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
if (!attemptId) return;
|
||||
if (!sessionId) return;
|
||||
try {
|
||||
const status = await queueApi.getStatus(attemptId);
|
||||
const status = await queueApi.getStatus(sessionId);
|
||||
setQueueStatus(status);
|
||||
} catch (e) {
|
||||
console.error('Failed to fetch queue status:', e);
|
||||
}
|
||||
}, [attemptId]);
|
||||
}, [sessionId]);
|
||||
|
||||
const queueMessage = useCallback(
|
||||
async (message: string, variant: string | null) => {
|
||||
if (!attemptId) return;
|
||||
if (!sessionId) return;
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const status = await queueApi.queue(attemptId, { message, variant });
|
||||
const status = await queueApi.queue(sessionId, { message, variant });
|
||||
setQueueStatus(status);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
},
|
||||
[attemptId]
|
||||
[sessionId]
|
||||
);
|
||||
|
||||
const cancelQueue = useCallback(async () => {
|
||||
if (!attemptId) return;
|
||||
if (!sessionId) return;
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const status = await queueApi.cancel(attemptId);
|
||||
const status = await queueApi.cancel(sessionId);
|
||||
setQueueStatus(status);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [attemptId]);
|
||||
}, [sessionId]);
|
||||
|
||||
// Fetch initial status when attemptId changes
|
||||
// Fetch initial status when sessionId changes
|
||||
useEffect(() => {
|
||||
if (attemptId) {
|
||||
if (sessionId) {
|
||||
refresh();
|
||||
} else {
|
||||
setQueueStatus({ status: 'empty' });
|
||||
}
|
||||
}, [attemptId, refresh]);
|
||||
}, [sessionId, refresh]);
|
||||
|
||||
const isQueued = queueStatus.status === 'queued';
|
||||
const queuedMessage = isQueued
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import { attemptsApi } from '@/lib/api';
|
||||
import { sessionsApi } from '@/lib/api';
|
||||
import {
|
||||
RestoreLogsDialog,
|
||||
type RestoreLogsDialogResult,
|
||||
@@ -22,7 +22,7 @@ class RetryDialogCancelledError extends Error {
|
||||
}
|
||||
|
||||
export function useRetryProcess(
|
||||
attemptId: string,
|
||||
sessionId: string,
|
||||
onSuccess?: () => void,
|
||||
onError?: (err: unknown) => void
|
||||
) {
|
||||
@@ -50,7 +50,7 @@ export function useRetryProcess(
|
||||
}
|
||||
|
||||
// Send the retry request
|
||||
await attemptsApi.followUp(attemptId, {
|
||||
await sessionsApi.followUp(sessionId, {
|
||||
prompt: message,
|
||||
variant,
|
||||
retry_process_id: executionProcessId,
|
||||
|
||||
@@ -29,7 +29,7 @@ export function useTaskAttempts(taskId?: string, opts?: Options) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for components that need executor field for all attempts.
|
||||
* Hook for components that need session data for all attempts.
|
||||
* Fetches all attempts and their sessions in parallel.
|
||||
*/
|
||||
export function useTaskAttemptsWithSessions(taskId?: string, opts?: Options) {
|
||||
@@ -45,8 +45,8 @@ export function useTaskAttemptsWithSessions(taskId?: string, opts?: Options) {
|
||||
attempts.map((attempt) => sessionsApi.getByWorkspace(attempt.id))
|
||||
);
|
||||
return attempts.map((attempt, i) => {
|
||||
const executor = sessionsResults[i][0]?.executor ?? 'unknown';
|
||||
return createWorkspaceWithSession(attempt, executor);
|
||||
const session = sessionsResults[i][0];
|
||||
return createWorkspaceWithSession(attempt, session);
|
||||
});
|
||||
},
|
||||
enabled,
|
||||
|
||||
@@ -475,6 +475,33 @@ export const sessionsApi = {
|
||||
);
|
||||
return handleApiResponse<Session[]>(response);
|
||||
},
|
||||
|
||||
getById: async (sessionId: string): Promise<Session> => {
|
||||
const response = await makeRequest(`/api/sessions/${sessionId}`);
|
||||
return handleApiResponse<Session>(response);
|
||||
},
|
||||
|
||||
create: async (data: {
|
||||
workspace_id: string;
|
||||
executor?: string;
|
||||
}): Promise<Session> => {
|
||||
const response = await makeRequest('/api/sessions', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
return handleApiResponse<Session>(response);
|
||||
},
|
||||
|
||||
followUp: async (
|
||||
sessionId: string,
|
||||
data: CreateFollowUpAttempt
|
||||
): Promise<ExecutionProcess> => {
|
||||
const response = await makeRequest(`/api/sessions/${sessionId}/follow-up`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
return handleApiResponse<ExecutionProcess>(response);
|
||||
},
|
||||
};
|
||||
|
||||
// Task Attempts APIs
|
||||
@@ -496,14 +523,13 @@ export const attemptsApi = {
|
||||
return handleApiResponse<Workspace>(response);
|
||||
},
|
||||
|
||||
/** Get workspace with executor from latest session (for components that need executor) */
|
||||
/** Get workspace with latest session */
|
||||
getWithSession: async (attemptId: string): Promise<WorkspaceWithSession> => {
|
||||
const [workspace, sessions] = await Promise.all([
|
||||
attemptsApi.get(attemptId),
|
||||
sessionsApi.getByWorkspace(attemptId),
|
||||
]);
|
||||
const executor = sessions[0]?.executor ?? 'unknown';
|
||||
return createWorkspaceWithSession(workspace, executor);
|
||||
return createWorkspaceWithSession(workspace, sessions[0]);
|
||||
},
|
||||
|
||||
create: async (data: CreateTaskAttemptBody): Promise<Workspace> => {
|
||||
@@ -521,20 +547,6 @@ export const attemptsApi = {
|
||||
return handleApiResponse<void>(response);
|
||||
},
|
||||
|
||||
followUp: async (
|
||||
attemptId: string,
|
||||
data: CreateFollowUpAttempt
|
||||
): Promise<void> => {
|
||||
const response = await makeRequest(
|
||||
`/api/task-attempts/${attemptId}/follow-up`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: JSON.stringify(data),
|
||||
}
|
||||
);
|
||||
return handleApiResponse<void>(response);
|
||||
},
|
||||
|
||||
runAgentSetup: async (
|
||||
attemptId: string,
|
||||
data: RunAgentSetupRequest
|
||||
@@ -1244,43 +1256,37 @@ export const scratchApi = {
|
||||
`/api/scratch/${scratchType}/${id}/stream/ws`,
|
||||
};
|
||||
|
||||
// Queue API for task attempt follow-up messages
|
||||
// Queue API for session follow-up messages
|
||||
export const queueApi = {
|
||||
/**
|
||||
* Queue a follow-up message to be executed when current execution finishes
|
||||
*/
|
||||
queue: async (
|
||||
attemptId: string,
|
||||
sessionId: string,
|
||||
data: { message: string; variant: string | null }
|
||||
): Promise<QueueStatus> => {
|
||||
const response = await makeRequest(
|
||||
`/api/task-attempts/${attemptId}/queue`,
|
||||
{
|
||||
const response = await makeRequest(`/api/sessions/${sessionId}/queue`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(data),
|
||||
}
|
||||
);
|
||||
});
|
||||
return handleApiResponse<QueueStatus>(response);
|
||||
},
|
||||
|
||||
/**
|
||||
* Cancel a queued follow-up message
|
||||
*/
|
||||
cancel: async (attemptId: string): Promise<QueueStatus> => {
|
||||
const response = await makeRequest(
|
||||
`/api/task-attempts/${attemptId}/queue`,
|
||||
{
|
||||
cancel: async (sessionId: string): Promise<QueueStatus> => {
|
||||
const response = await makeRequest(`/api/sessions/${sessionId}/queue`, {
|
||||
method: 'DELETE',
|
||||
}
|
||||
);
|
||||
});
|
||||
return handleApiResponse<QueueStatus>(response);
|
||||
},
|
||||
|
||||
/**
|
||||
* Get the current queue status for a task attempt
|
||||
* Get the current queue status for a session
|
||||
*/
|
||||
getStatus: async (attemptId: string): Promise<QueueStatus> => {
|
||||
const response = await makeRequest(`/api/task-attempts/${attemptId}/queue`);
|
||||
getStatus: async (sessionId: string): Promise<QueueStatus> => {
|
||||
const response = await makeRequest(`/api/sessions/${sessionId}/queue`);
|
||||
return handleApiResponse<QueueStatus>(response);
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
import type { Workspace } from 'shared/types';
|
||||
import type { Workspace, Session } from 'shared/types';
|
||||
|
||||
/**
|
||||
* WorkspaceWithSession includes executor from the latest Session.
|
||||
* Only used by components that actually need the executor field.
|
||||
* WorkspaceWithSession includes the latest Session for the workspace.
|
||||
* Provides access to session.id, session.executor, etc.
|
||||
*/
|
||||
export type WorkspaceWithSession = Workspace & {
|
||||
executor: string;
|
||||
session: Session | undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a WorkspaceWithSession from a Workspace and executor string.
|
||||
* Create a WorkspaceWithSession from a Workspace and Session.
|
||||
*/
|
||||
export function createWorkspaceWithSession(
|
||||
workspace: Workspace,
|
||||
executor: string
|
||||
session: Session | undefined
|
||||
): WorkspaceWithSession {
|
||||
return {
|
||||
...workspace,
|
||||
executor,
|
||||
session,
|
||||
};
|
||||
}
|
||||
|
||||