Event for merged PRs (vibe-kanban) (#924)
* Done! I've successfully added analytics tracking for when PRs are merged. Here's what was implemented:

**Changes made:**

1. **crates/services/src/services/pr_monitor.rs**:
   - Added `AnalyticsContext` import
   - Added optional `analytics: Option<AnalyticsContext>` field to the `PrMonitorService` struct
   - Updated the `spawn()` method to accept an `Option<AnalyticsContext>` parameter
   - Added analytics event tracking in the `check_pr_status()` method (lines 134-147) when a PR is merged

2. **crates/deployment/src/lib.rs**:
   - Added `AnalyticsContext` import
   - Updated `spawn_pr_monitor_service()` to create and pass an `AnalyticsContext` to the PR monitor service

**Analytics event details:**

- **Event name**: `pr_merged`
- **Properties**:
  - `task_id`: Associated task ID
  - `task_attempt_id`: Task attempt ID
  - `project_id`: Project ID

The implementation follows the same pattern used for the `task_attempt_finished` event, and the code compiles successfully.

* Clippy + fmt
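The diff below constructs `AnalyticsContext` and calls `AnalyticsService::track_event` without showing either definition. As orientation, here is a minimal sketch of the shapes that usage implies — the two field names come straight from the diff; everything else, the stand-in body in particular, is an assumption:

```rust
use serde_json::Value;

// Shape implied by the diff: it builds AnalyticsContext { user_id, analytics_service }.
#[derive(Clone)]
pub struct AnalyticsContext {
    pub user_id: String,
    pub analytics_service: AnalyticsService,
}

// Stand-in for the real service; assumed, since its definition is not part of this commit.
#[derive(Clone)]
pub struct AnalyticsService;

impl AnalyticsService {
    // The diff calls track_event(&user_id, "pr_merged", Some(json!({ ... }))),
    // so the signature is presumably (user id, event name, optional JSON payload).
    pub fn track_event(&self, user_id: &str, event_name: &str, properties: Option<Value>) {
        // Placeholder body: the real implementation presumably forwards the event
        // to an analytics backend rather than printing it.
        println!("[analytics] user={user_id} event={event_name} properties={properties:?}");
    }
}
```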
crates/deployment/src/lib.rs
@@ -17,7 +17,7 @@ use futures::{StreamExt, TryStreamExt};
 use git2::Error as Git2Error;
 use serde_json::Value;
 use services::services::{
-    analytics::AnalyticsService,
+    analytics::{AnalyticsContext, AnalyticsService},
     approvals::Approvals,
     auth::{AuthError, AuthService},
     config::{Config, ConfigError},
@@ -122,7 +122,14 @@ pub trait Deployment: Clone + Send + Sync + 'static {
     async fn spawn_pr_monitor_service(&self) -> tokio::task::JoinHandle<()> {
         let db = self.db().clone();
         let config = self.config().clone();
-        PrMonitorService::spawn(db, config).await
+        let analytics = self
+            .analytics()
+            .as_ref()
+            .map(|analytics_service| AnalyticsContext {
+                user_id: self.user_id().to_string(),
+                analytics_service: analytics_service.clone(),
+            });
+        PrMonitorService::spawn(db, config, analytics).await
     }
 
     async fn track_if_analytics_allowed(&self, event_name: &str, properties: Value) {
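Note that `spawn_pr_monitor_service` leans on two `Deployment` accessors this commit does not touch. Their real signatures are not shown here; judging from `.as_ref()` being applied to one return value and `.to_string()` to the other, they are presumably along these lines (a hypothetical reconstruction, using the `AnalyticsService` stand-in sketched above):

```rust
// Hypothetical: inferred from usage in the hunk above, not from the commit itself.
pub trait DeploymentAccessors {
    // `self.analytics().as_ref()` implies a borrowed Option<AnalyticsService>.
    fn analytics(&self) -> &Option<AnalyticsService>;
    // `self.user_id().to_string()` implies a borrowed string slice.
    fn user_id(&self) -> &str;
}
```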
crates/services/src/services/pr_monitor.rs
@@ -8,12 +8,14 @@ use db::{
         task_attempt::{TaskAttempt, TaskAttemptError},
     },
 };
+use serde_json::json;
 use sqlx::error::Error as SqlxError;
 use thiserror::Error;
 use tokio::{sync::RwLock, time::interval};
 use tracing::{debug, error, info};
 
 use crate::services::{
+    analytics::AnalyticsContext,
     config::Config,
     github_service::{GitHubRepoInfo, GitHubService, GitHubServiceError},
 };
@@ -35,14 +37,20 @@ pub struct PrMonitorService {
     db: DBService,
     config: Arc<RwLock<Config>>,
     poll_interval: Duration,
+    analytics: Option<AnalyticsContext>,
 }
 
 impl PrMonitorService {
-    pub async fn spawn(db: DBService, config: Arc<RwLock<Config>>) -> tokio::task::JoinHandle<()> {
+    pub async fn spawn(
+        db: DBService,
+        config: Arc<RwLock<Config>>,
+        analytics: Option<AnalyticsContext>,
+    ) -> tokio::task::JoinHandle<()> {
         let service = Self {
             db,
             config,
             poll_interval: Duration::from_secs(60), // Check every minute
+            analytics,
         };
         tokio::spawn(async move {
             service.start().await;
@@ -126,6 +134,22 @@ impl PrMonitorService {
                 pr_merge.pr_info.number, task_attempt.task_id
             );
             Task::update_status(&self.db.pool, task_attempt.task_id, TaskStatus::Done).await?;
+
+            // Track analytics event
+            if let Some(analytics) = &self.analytics
+                && let Ok(Some(task)) =
+                    Task::find_by_id(&self.db.pool, task_attempt.task_id).await
+            {
+                analytics.analytics_service.track_event(
+                    &analytics.user_id,
+                    "pr_merged",
+                    Some(json!({
+                        "task_id": task_attempt.task_id.to_string(),
+                        "task_attempt_id": task_attempt.id.to_string(),
+                        "project_id": task.project_id.to_string(),
+                    })),
+                );
+            }
         }
     }
 
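One detail worth flagging in the final hunk: `if let Some(analytics) = &self.analytics && let Ok(Some(task)) = ...` is a let-chain, which only compiles on a toolchain where let-chains are stable (Rust 1.88+ with the 2024 edition). On earlier editions, the same logic needs one extra level of nesting, roughly:

```rust
// Pre-let-chain equivalent of the tracking block above (same behavior, nested form).
if let Some(analytics) = &self.analytics {
    if let Ok(Some(task)) = Task::find_by_id(&self.db.pool, task_attempt.task_id).await {
        analytics.analytics_service.track_event(
            &analytics.user_id,
            "pr_merged",
            Some(json!({
                "task_id": task_attempt.task_id.to_string(),
                "task_attempt_id": task_attempt.id.to_string(),
                "project_id": task.project_id.to_string(),
            })),
        );
    }
}
```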