diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 3421fe98..018e173f 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -122,6 +122,7 @@ jobs: runs-on: ubuntu-latest env: VITE_PUBLIC_REACT_VIRTUOSO_LICENSE_KEY: ${{ secrets.PUBLIC_REACT_VIRTUOSO_LICENSE_KEY }} + VITE_VK_SHARED_API_BASE: ${{ secrets.VK_SHARED_API_BASE }} steps: - uses: actions/checkout@v4 with: diff --git a/.github/workflows/remote-deploy-dev.yml b/.github/workflows/remote-deploy-dev.yml index 68278b2a..ba0b33ce 100644 --- a/.github/workflows/remote-deploy-dev.yml +++ b/.github/workflows/remote-deploy-dev.yml @@ -3,7 +3,7 @@ name: Remote Deploy Dev on: push: branches: - - gabriel/share + - electric - main paths: - crates/remote/** diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 468369f4..07188c92 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -62,6 +62,8 @@ jobs: - name: Build frontend run: cd frontend && npm run build + env: + NODE_OPTIONS: --max-old-space-size=8192 - name: Checks run: | diff --git a/Cargo.lock b/Cargo.lock index d56c7f1d..e255b9bd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -429,28 +429,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.108", -] - [[package]] name = "async-task" version = "4.7.1" @@ -2925,7 +2903,6 @@ name = "local-deployment" version = "0.0.126" dependencies = [ "anyhow", - "async-stream", "async-trait", "bytes", "command-group", @@ -4056,6 +4033,7 @@ dependencies = [ "tracing", "tracing-error", "tracing-subscriber", + "ts-rs 11.0.1", "url", "utils", "uuid", @@ -4097,12 +4075,14 @@ dependencies = [ "tokio", "tokio-native-tls", "tokio-rustls", + "tokio-util", "tower", "tower-http 0.6.6", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", "webpki-roots 1.0.4", ] @@ -4736,6 +4716,7 @@ dependencies = [ "openssl-sys", "os_info", "rand 0.8.5", + "remote", "reqwest", "rmcp", "rust-embed", @@ -5978,12 +5959,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" name = "utils" version = "0.0.126" dependencies = [ - "async-stream", - "async-trait", "axum 0.8.6", "bytes", "chrono", - "dashmap", "directories", "dirs 5.0.1", "futures", @@ -6155,6 +6133,19 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "web-sys" version = "0.3.82" diff --git a/crates/db/.sqlx/query-00e71b6e31b432be788fe5c8a1b8954560a3bc52910da2b93a6a816032d8d0fd.json b/crates/db/.sqlx/query-00e71b6e31b432be788fe5c8a1b8954560a3bc52910da2b93a6a816032d8d0fd.json deleted file mode 100644 index b0c1b9f7..00000000 --- 
a/crates/db/.sqlx/query-00e71b6e31b432be788fe5c8a1b8954560a3bc52910da2b93a6a816032d8d0fd.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n SELECT\n id AS \"id!: Uuid\",\n remote_project_id AS \"remote_project_id!: Uuid\",\n title AS title,\n description AS description,\n status AS \"status!: TaskStatus\",\n assignee_user_id AS \"assignee_user_id: Uuid\",\n assignee_first_name AS \"assignee_first_name: String\",\n assignee_last_name AS \"assignee_last_name: String\",\n assignee_username AS \"assignee_username: String\",\n version AS \"version!: i64\",\n last_event_seq AS \"last_event_seq: i64\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM shared_tasks\n WHERE rowid = $1\n ", - "describe": { - "columns": [ - { - "name": "id!: Uuid", - "ordinal": 0, - "type_info": "Blob" - }, - { - "name": "remote_project_id!: Uuid", - "ordinal": 1, - "type_info": "Blob" - }, - { - "name": "title", - "ordinal": 2, - "type_info": "Text" - }, - { - "name": "description", - "ordinal": 3, - "type_info": "Text" - }, - { - "name": "status!: TaskStatus", - "ordinal": 4, - "type_info": "Text" - }, - { - "name": "assignee_user_id: Uuid", - "ordinal": 5, - "type_info": "Blob" - }, - { - "name": "assignee_first_name: String", - "ordinal": 6, - "type_info": "Text" - }, - { - "name": "assignee_last_name: String", - "ordinal": 7, - "type_info": "Text" - }, - { - "name": "assignee_username: String", - "ordinal": 8, - "type_info": "Text" - }, - { - "name": "version!: i64", - "ordinal": 9, - "type_info": "Integer" - }, - { - "name": "last_event_seq: i64", - "ordinal": 10, - "type_info": "Integer" - }, - { - "name": "created_at!: DateTime<Utc>", - "ordinal": 11, - "type_info": "Text" - }, - { - "name": "updated_at!: DateTime<Utc>", - "ordinal": 12, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - true, - false, - false, - true, - false, - true, - true, - true, - true, - false, - true, - false, - false - ] - }, - "hash": "00e71b6e31b432be788fe5c8a1b8954560a3bc52910da2b93a6a816032d8d0fd" -} diff --git a/crates/db/.sqlx/query-167422f3d3b74e0f8c9773aabe62b27092c44ec88df58bd3bb4c887351c6cb93.json b/crates/db/.sqlx/query-167422f3d3b74e0f8c9773aabe62b27092c44ec88df58bd3bb4c887351c6cb93.json new file mode 100644 index 00000000..a626b4ae --- /dev/null +++ b/crates/db/.sqlx/query-167422f3d3b74e0f8c9773aabe62b27092c44ec88df58bd3bb4c887351c6cb93.json @@ -0,0 +1,68 @@ +{ + "db_name": "SQLite", + "query": "SELECT id as \"id!: Uuid\", project_id as \"project_id!: Uuid\", title, description, status as \"status!: TaskStatus\", parent_task_attempt as \"parent_task_attempt: Uuid\", shared_task_id as \"shared_task_id: Uuid\", created_at as \"created_at!: DateTime<Utc>\", updated_at as \"updated_at!: DateTime<Utc>\"\n FROM tasks\n WHERE shared_task_id IS NOT NULL", + "describe": { + "columns": [ + { + "name": "id!: Uuid", + "ordinal": 0, + "type_info": "Blob" + }, + { + "name": "project_id!: Uuid", + "ordinal": 1, + "type_info": "Blob" + }, + { + "name": "title", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "description", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "status!: TaskStatus", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "parent_task_attempt: Uuid", + "ordinal": 5, + "type_info": "Blob" + }, + { + "name": "shared_task_id: Uuid", + "ordinal": 6, + "type_info": "Blob" + }, + { + "name": "created_at!: DateTime<Utc>", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "updated_at!: DateTime<Utc>", + "ordinal": 8,
"type_info": "Text" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + true, + false, + false, + true, + false, + true, + true, + false, + false + ] + }, + "hash": "167422f3d3b74e0f8c9773aabe62b27092c44ec88df58bd3bb4c887351c6cb93" +} diff --git a/crates/db/.sqlx/query-1c6b836c28f8068506f3582bc56fcf2c7e6e784c73fac5fc174fe299902ca4cb.json b/crates/db/.sqlx/query-1c6b836c28f8068506f3582bc56fcf2c7e6e784c73fac5fc174fe299902ca4cb.json deleted file mode 100644 index b4e74613..00000000 --- a/crates/db/.sqlx/query-1c6b836c28f8068506f3582bc56fcf2c7e6e784c73fac5fc174fe299902ca4cb.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "UPDATE tasks\n SET shared_task_id = NULL\n WHERE shared_task_id IN (\n SELECT id FROM shared_tasks WHERE remote_project_id = $1\n )", - "describe": { - "columns": [], - "parameters": { - "Right": 1 - }, - "nullable": [] - }, - "hash": "1c6b836c28f8068506f3582bc56fcf2c7e6e784c73fac5fc174fe299902ca4cb" -} diff --git a/crates/db/.sqlx/query-253a2292b461b964c792ff97adc6e01646a888e221290d312e2773609f97a6c4.json b/crates/db/.sqlx/query-253a2292b461b964c792ff97adc6e01646a888e221290d312e2773609f97a6c4.json deleted file mode 100644 index 14ee162a..00000000 --- a/crates/db/.sqlx/query-253a2292b461b964c792ff97adc6e01646a888e221290d312e2773609f97a6c4.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "DELETE FROM shared_tasks WHERE id = $1", - "describe": { - "columns": [], - "parameters": { - "Right": 1 - }, - "nullable": [] - }, - "hash": "253a2292b461b964c792ff97adc6e01646a888e221290d312e2773609f97a6c4" -} diff --git a/crates/db/.sqlx/query-2a49be016c5999f4069823fc7aa1cd0eeed1b1b1743f50e89ceb2d310c5f18bb.json b/crates/db/.sqlx/query-2a49be016c5999f4069823fc7aa1cd0eeed1b1b1743f50e89ceb2d310c5f18bb.json deleted file mode 100644 index c53db1c5..00000000 --- a/crates/db/.sqlx/query-2a49be016c5999f4069823fc7aa1cd0eeed1b1b1743f50e89ceb2d310c5f18bb.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n SELECT\n remote_project_id AS \"remote_project_id!: Uuid\",\n last_seq AS \"last_seq!: i64\",\n updated_at AS \"updated_at!: DateTime\"\n FROM shared_activity_cursors\n WHERE remote_project_id = $1\n ", - "describe": { - "columns": [ - { - "name": "remote_project_id!: Uuid", - "ordinal": 0, - "type_info": "Blob" - }, - { - "name": "last_seq!: i64", - "ordinal": 1, - "type_info": "Integer" - }, - { - "name": "updated_at!: DateTime", - "ordinal": 2, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - true, - false, - false - ] - }, - "hash": "2a49be016c5999f4069823fc7aa1cd0eeed1b1b1743f50e89ceb2d310c5f18bb" -} diff --git a/crates/db/.sqlx/query-31cbff397a2c2c116f9f7558d04ad2de76b4b3aaa072253172f40ef378998451.json b/crates/db/.sqlx/query-31cbff397a2c2c116f9f7558d04ad2de76b4b3aaa072253172f40ef378998451.json new file mode 100644 index 00000000..dfade8d2 --- /dev/null +++ b/crates/db/.sqlx/query-31cbff397a2c2c116f9f7558d04ad2de76b4b3aaa072253172f40ef378998451.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "UPDATE tasks\n SET shared_task_id = NULL\n WHERE project_id IN (\n SELECT id FROM projects WHERE remote_project_id = $1\n )", + "describe": { + "columns": [], + "parameters": { + "Right": 1 + }, + "nullable": [] + }, + "hash": "31cbff397a2c2c116f9f7558d04ad2de76b4b3aaa072253172f40ef378998451" +} diff --git a/crates/db/.sqlx/query-3cbd8fd4383a9f0899a12783be95972dec2ff6b9d0f3e3ed05bb5a07ea8c6ef0.json 
b/crates/db/.sqlx/query-3cbd8fd4383a9f0899a12783be95972dec2ff6b9d0f3e3ed05bb5a07ea8c6ef0.json deleted file mode 100644 index 3f30e445..00000000 --- a/crates/db/.sqlx/query-3cbd8fd4383a9f0899a12783be95972dec2ff6b9d0f3e3ed05bb5a07ea8c6ef0.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n SELECT\n id AS \"id!: Uuid\",\n remote_project_id AS \"remote_project_id!: Uuid\",\n title AS title,\n description AS description,\n status AS \"status!: TaskStatus\",\n assignee_user_id AS \"assignee_user_id: Uuid\",\n assignee_first_name AS \"assignee_first_name: String\",\n assignee_last_name AS \"assignee_last_name: String\",\n assignee_username AS \"assignee_username: String\",\n version AS \"version!: i64\",\n last_event_seq AS \"last_event_seq: i64\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM shared_tasks\n WHERE remote_project_id = $1\n ORDER BY updated_at DESC\n ", - "describe": { - "columns": [ - { - "name": "id!: Uuid", - "ordinal": 0, - "type_info": "Blob" - }, - { - "name": "remote_project_id!: Uuid", - "ordinal": 1, - "type_info": "Blob" - }, - { - "name": "title", - "ordinal": 2, - "type_info": "Text" - }, - { - "name": "description", - "ordinal": 3, - "type_info": "Text" - }, - { - "name": "status!: TaskStatus", - "ordinal": 4, - "type_info": "Text" - }, - { - "name": "assignee_user_id: Uuid", - "ordinal": 5, - "type_info": "Blob" - }, - { - "name": "assignee_first_name: String", - "ordinal": 6, - "type_info": "Text" - }, - { - "name": "assignee_last_name: String", - "ordinal": 7, - "type_info": "Text" - }, - { - "name": "assignee_username: String", - "ordinal": 8, - "type_info": "Text" - }, - { - "name": "version!: i64", - "ordinal": 9, - "type_info": "Integer" - }, - { - "name": "last_event_seq: i64", - "ordinal": 10, - "type_info": "Integer" - }, - { - "name": "created_at!: DateTime<Utc>", - "ordinal": 11, - "type_info": "Text" - }, - { - "name": "updated_at!: DateTime<Utc>", - "ordinal": 12, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - true, - false, - false, - true, - false, - true, - true, - true, - true, - false, - true, - false, - false - ] - }, - "hash": "3cbd8fd4383a9f0899a12783be95972dec2ff6b9d0f3e3ed05bb5a07ea8c6ef0" -} diff --git a/crates/db/.sqlx/query-ae8e284c805801a381ba6b700717884e6701e6e18db4bf019684ace8d8941edc.json b/crates/db/.sqlx/query-4ecc6054f64e2e4adeefc0ab5e769a77a96d1211447b426606253f50fd3e4e6d.json similarity index 89% rename from crates/db/.sqlx/query-ae8e284c805801a381ba6b700717884e6701e6e18db4bf019684ace8d8941edc.json rename to crates/db/.sqlx/query-4ecc6054f64e2e4adeefc0ab5e769a77a96d1211447b426606253f50fd3e4e6d.json index 10fbd8fb..20f4ee8b 100644 --- a/crates/db/.sqlx/query-ae8e284c805801a381ba6b700717884e6701e6e18db4bf019684ace8d8941edc.json +++ b/crates/db/.sqlx/query-4ecc6054f64e2e4adeefc0ab5e769a77a96d1211447b426606253f50fd3e4e6d.json @@ -1,6 +1,6 @@ { "db_name": "SQLite", - "query": "SELECT id as \"id!: Uuid\", project_id as \"project_id!: Uuid\", title, description, status as \"status!: TaskStatus\", parent_task_attempt as \"parent_task_attempt: Uuid\", shared_task_id as \"shared_task_id: Uuid\", created_at as \"created_at!: DateTime<Utc>\", updated_at as \"updated_at!: DateTime<Utc>\"\n FROM tasks \n WHERE shared_task_id = $1\n LIMIT 1", + "query": "SELECT id as \"id!: Uuid\", project_id as \"project_id!: Uuid\", title, description, status as \"status!: TaskStatus\", parent_task_attempt as \"parent_task_attempt: Uuid\", shared_task_id as \"shared_task_id:
Uuid\", created_at as \"created_at!: DateTime\", updated_at as \"updated_at!: DateTime\"\n FROM tasks\n WHERE shared_task_id = $1\n LIMIT 1", "describe": { "columns": [ { @@ -64,5 +64,5 @@ false ] }, - "hash": "ae8e284c805801a381ba6b700717884e6701e6e18db4bf019684ace8d8941edc" + "hash": "4ecc6054f64e2e4adeefc0ab5e769a77a96d1211447b426606253f50fd3e4e6d" } diff --git a/crates/db/.sqlx/query-5393ad53affc4e19668d3b522f038fe0dd01993e236c5964ea7671ff22f697c8.json b/crates/db/.sqlx/query-5393ad53affc4e19668d3b522f038fe0dd01993e236c5964ea7671ff22f697c8.json deleted file mode 100644 index 59913131..00000000 --- a/crates/db/.sqlx/query-5393ad53affc4e19668d3b522f038fe0dd01993e236c5964ea7671ff22f697c8.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n INSERT INTO tasks (\n id,\n project_id,\n title,\n description,\n status,\n shared_task_id\n )\n SELECT\n $1,\n $2,\n $3,\n $4,\n $5,\n $6\n WHERE $7\n OR EXISTS (\n SELECT 1 FROM tasks WHERE shared_task_id = $6\n )\n ON CONFLICT(shared_task_id) WHERE shared_task_id IS NOT NULL DO UPDATE SET\n project_id = excluded.project_id,\n title = excluded.title,\n description = excluded.description,\n status = excluded.status,\n updated_at = datetime('now', 'subsec')\n ", - "describe": { - "columns": [], - "parameters": { - "Right": 7 - }, - "nullable": [] - }, - "hash": "5393ad53affc4e19668d3b522f038fe0dd01993e236c5964ea7671ff22f697c8" -} diff --git a/crates/db/.sqlx/query-6a4e4fd60ae727839029a4d00c0626d0f8d0d78edb1d76af3be11dcb788f34aa.json b/crates/db/.sqlx/query-6a4e4fd60ae727839029a4d00c0626d0f8d0d78edb1d76af3be11dcb788f34aa.json deleted file mode 100644 index 0c91a17b..00000000 --- a/crates/db/.sqlx/query-6a4e4fd60ae727839029a4d00c0626d0f8d0d78edb1d76af3be11dcb788f34aa.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n INSERT INTO shared_activity_cursors (\n remote_project_id,\n last_seq,\n updated_at\n )\n VALUES (\n $1,\n $2,\n datetime('now', 'subsec')\n )\n ON CONFLICT(remote_project_id) DO UPDATE SET\n last_seq = excluded.last_seq,\n updated_at = excluded.updated_at\n RETURNING\n remote_project_id AS \"remote_project_id!: Uuid\",\n last_seq AS \"last_seq!: i64\",\n updated_at AS \"updated_at!: DateTime\"\n ", - "describe": { - "columns": [ - { - "name": "remote_project_id!: Uuid", - "ordinal": 0, - "type_info": "Blob" - }, - { - "name": "last_seq!: i64", - "ordinal": 1, - "type_info": "Integer" - }, - { - "name": "updated_at!: DateTime", - "ordinal": 2, - "type_info": "Text" - } - ], - "parameters": { - "Right": 2 - }, - "nullable": [ - true, - false, - false - ] - }, - "hash": "6a4e4fd60ae727839029a4d00c0626d0f8d0d78edb1d76af3be11dcb788f34aa" -} diff --git a/crates/db/.sqlx/query-6d3443d4f96369fa72df0ddd2f06d1fbb36b22a46ed421865d699907e5e71451.json b/crates/db/.sqlx/query-6d3443d4f96369fa72df0ddd2f06d1fbb36b22a46ed421865d699907e5e71451.json deleted file mode 100644 index 3613da90..00000000 --- a/crates/db/.sqlx/query-6d3443d4f96369fa72df0ddd2f06d1fbb36b22a46ed421865d699907e5e71451.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n INSERT INTO shared_tasks (\n id,\n remote_project_id,\n title,\n description,\n status,\n assignee_user_id,\n assignee_first_name,\n assignee_last_name,\n assignee_username,\n version,\n last_event_seq,\n created_at,\n updated_at\n )\n VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13\n )\n ON CONFLICT(id) DO UPDATE SET\n remote_project_id = excluded.remote_project_id,\n title = excluded.title,\n description = excluded.description,\n 
status = excluded.status,\n assignee_user_id = excluded.assignee_user_id,\n assignee_first_name = excluded.assignee_first_name,\n assignee_last_name = excluded.assignee_last_name,\n assignee_username = excluded.assignee_username,\n version = excluded.version,\n last_event_seq = excluded.last_event_seq,\n created_at = excluded.created_at,\n updated_at = excluded.updated_at\n RETURNING\n id AS \"id!: Uuid\",\n remote_project_id AS \"remote_project_id!: Uuid\",\n title AS title,\n description AS description,\n status AS \"status!: TaskStatus\",\n assignee_user_id AS \"assignee_user_id: Uuid\",\n assignee_first_name AS \"assignee_first_name: String\",\n assignee_last_name AS \"assignee_last_name: String\",\n assignee_username AS \"assignee_username: String\",\n version AS \"version!: i64\",\n last_event_seq AS \"last_event_seq: i64\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n ", - "describe": { - "columns": [ - { - "name": "id!: Uuid", - "ordinal": 0, - "type_info": "Blob" - }, - { - "name": "remote_project_id!: Uuid", - "ordinal": 1, - "type_info": "Blob" - }, - { - "name": "title", - "ordinal": 2, - "type_info": "Text" - }, - { - "name": "description", - "ordinal": 3, - "type_info": "Text" - }, - { - "name": "status!: TaskStatus", - "ordinal": 4, - "type_info": "Text" - }, - { - "name": "assignee_user_id: Uuid", - "ordinal": 5, - "type_info": "Blob" - }, - { - "name": "assignee_first_name: String", - "ordinal": 6, - "type_info": "Text" - }, - { - "name": "assignee_last_name: String", - "ordinal": 7, - "type_info": "Text" - }, - { - "name": "assignee_username: String", - "ordinal": 8, - "type_info": "Text" - }, - { - "name": "version!: i64", - "ordinal": 9, - "type_info": "Integer" - }, - { - "name": "last_event_seq: i64", - "ordinal": 10, - "type_info": "Integer" - }, - { - "name": "created_at!: DateTime<Utc>", - "ordinal": 11, - "type_info": "Text" - }, - { - "name": "updated_at!: DateTime<Utc>", - "ordinal": 12, - "type_info": "Text" - } - ], - "parameters": { - "Right": 13 - }, - "nullable": [ - true, - false, - false, - true, - false, - true, - true, - true, - true, - false, - true, - false, - false - ] - }, - "hash": "6d3443d4f96369fa72df0ddd2f06d1fbb36b22a46ed421865d699907e5e71451" -} diff --git a/crates/db/.sqlx/query-b742031d1362f7fd7c63ab183af04be8fa79f8f6340d3e27c703a9c58b7c7805.json b/crates/db/.sqlx/query-b742031d1362f7fd7c63ab183af04be8fa79f8f6340d3e27c703a9c58b7c7805.json deleted file mode 100644 index b88c24b4..00000000 --- a/crates/db/.sqlx/query-b742031d1362f7fd7c63ab183af04be8fa79f8f6340d3e27c703a9c58b7c7805.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n SELECT\n id AS \"id!: Uuid\",\n remote_project_id AS \"remote_project_id!: Uuid\",\n title AS title,\n description AS description,\n status AS \"status!: TaskStatus\",\n assignee_user_id AS \"assignee_user_id: Uuid\",\n assignee_first_name AS \"assignee_first_name: String\",\n assignee_last_name AS \"assignee_last_name: String\",\n assignee_username AS \"assignee_username: String\",\n version AS \"version!: i64\",\n last_event_seq AS \"last_event_seq: i64\",\n created_at AS \"created_at!: DateTime<Utc>\",\n updated_at AS \"updated_at!: DateTime<Utc>\"\n FROM shared_tasks\n WHERE id = $1\n ", - "describe": { - "columns": [ - { - "name": "id!: Uuid", - "ordinal": 0, - "type_info": "Blob" - }, - { - "name": "remote_project_id!: Uuid", - "ordinal": 1, - "type_info": "Blob" - }, - { - "name": "title", - "ordinal": 2, - "type_info": "Text" - }, - { - "name": "description", -
"ordinal": 3, - "type_info": "Text" - }, - { - "name": "status!: TaskStatus", - "ordinal": 4, - "type_info": "Text" - }, - { - "name": "assignee_user_id: Uuid", - "ordinal": 5, - "type_info": "Blob" - }, - { - "name": "assignee_first_name: String", - "ordinal": 6, - "type_info": "Text" - }, - { - "name": "assignee_last_name: String", - "ordinal": 7, - "type_info": "Text" - }, - { - "name": "assignee_username: String", - "ordinal": 8, - "type_info": "Text" - }, - { - "name": "version!: i64", - "ordinal": 9, - "type_info": "Integer" - }, - { - "name": "last_event_seq: i64", - "ordinal": 10, - "type_info": "Integer" - }, - { - "name": "created_at!: DateTime", - "ordinal": 11, - "type_info": "Text" - }, - { - "name": "updated_at!: DateTime", - "ordinal": 12, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - true, - false, - false, - true, - false, - true, - true, - true, - true, - false, - true, - false, - false - ] - }, - "hash": "b742031d1362f7fd7c63ab183af04be8fa79f8f6340d3e27c703a9c58b7c7805" -} diff --git a/crates/db/migrations/20251202000000_migrate_to_electric.sql b/crates/db/migrations/20251202000000_migrate_to_electric.sql new file mode 100644 index 00000000..5df95cb4 --- /dev/null +++ b/crates/db/migrations/20251202000000_migrate_to_electric.sql @@ -0,0 +1,23 @@ +DROP TABLE IF EXISTS shared_activity_cursors; + +-- Drop the index on the old column if it exists +DROP INDEX IF EXISTS idx_tasks_shared_task_unique; + +-- Add new column to hold the data +ALTER TABLE tasks ADD COLUMN shared_task_id_new BLOB; + +-- Migrate data +UPDATE tasks SET shared_task_id_new = shared_task_id; + +-- Drop the old column (removing the foreign key constraint) +ALTER TABLE tasks DROP COLUMN shared_task_id; + +-- Rename the new column to the old name +ALTER TABLE tasks RENAME COLUMN shared_task_id_new TO shared_task_id; + +-- Recreate the index +CREATE UNIQUE INDEX IF NOT EXISTS idx_tasks_shared_task_unique + ON tasks(shared_task_id) + WHERE shared_task_id IS NOT NULL; + +DROP TABLE IF EXISTS shared_tasks; \ No newline at end of file diff --git a/crates/db/src/models/mod.rs b/crates/db/src/models/mod.rs index 812cd252..263d1675 100644 --- a/crates/db/src/models/mod.rs +++ b/crates/db/src/models/mod.rs @@ -5,7 +5,6 @@ pub mod image; pub mod merge; pub mod project; pub mod scratch; -pub mod shared_task; pub mod tag; pub mod task; pub mod task_attempt; diff --git a/crates/db/src/models/shared_task.rs b/crates/db/src/models/shared_task.rs deleted file mode 100644 index b2a8dae9..00000000 --- a/crates/db/src/models/shared_task.rs +++ /dev/null @@ -1,297 +0,0 @@ -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; -use sqlx::{Executor, FromRow, QueryBuilder, Sqlite, SqlitePool}; -use ts_rs::TS; -use uuid::Uuid; - -use super::task::TaskStatus; - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize, TS)] -pub struct SharedTask { - pub id: Uuid, - pub remote_project_id: Uuid, - pub title: String, - pub description: Option, - pub status: TaskStatus, - pub assignee_user_id: Option, - pub assignee_first_name: Option, - pub assignee_last_name: Option, - pub assignee_username: Option, - pub version: i64, - pub last_event_seq: Option, - #[ts(type = "Date")] - pub created_at: DateTime, - #[ts(type = "Date")] - pub updated_at: DateTime, -} - -#[derive(Debug, Clone)] -pub struct SharedTaskInput { - pub id: Uuid, - pub remote_project_id: Uuid, - pub title: String, - pub description: Option, - pub status: TaskStatus, - pub assignee_user_id: Option, - pub assignee_first_name: 
diff --git a/crates/db/src/models/mod.rs b/crates/db/src/models/mod.rs index 812cd252..263d1675 100644 --- a/crates/db/src/models/mod.rs +++ b/crates/db/src/models/mod.rs @@ -5,7 +5,6 @@ pub mod image; pub mod merge; pub mod project; pub mod scratch; -pub mod shared_task; pub mod tag; pub mod task; pub mod task_attempt; diff --git a/crates/db/src/models/shared_task.rs b/crates/db/src/models/shared_task.rs deleted file mode 100644 index b2a8dae9..00000000 --- a/crates/db/src/models/shared_task.rs +++ /dev/null @@ -1,297 +0,0 @@ -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; -use sqlx::{Executor, FromRow, QueryBuilder, Sqlite, SqlitePool}; -use ts_rs::TS; -use uuid::Uuid; - -use super::task::TaskStatus; - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize, TS)] -pub struct SharedTask { - pub id: Uuid, - pub remote_project_id: Uuid, - pub title: String, - pub description: Option<String>, - pub status: TaskStatus, - pub assignee_user_id: Option<Uuid>, - pub assignee_first_name: Option<String>, - pub assignee_last_name: Option<String>, - pub assignee_username: Option<String>, - pub version: i64, - pub last_event_seq: Option<i64>, - #[ts(type = "Date")] - pub created_at: DateTime<Utc>, - #[ts(type = "Date")] - pub updated_at: DateTime<Utc>, -} - -#[derive(Debug, Clone)] -pub struct SharedTaskInput { - pub id: Uuid, - pub remote_project_id: Uuid, - pub title: String, - pub description: Option<String>, - pub status: TaskStatus, - pub assignee_user_id: Option<Uuid>, - pub assignee_first_name: Option<String>, - pub assignee_last_name: Option<String>, - pub assignee_username: Option<String>, - pub version: i64, - pub last_event_seq: Option<i64>, - pub created_at: DateTime<Utc>, - pub updated_at: DateTime<Utc>, -} - -impl SharedTask { - pub async fn list_by_remote_project_id( - pool: &SqlitePool, - remote_project_id: Uuid, - ) -> Result<Vec<SharedTask>, sqlx::Error> { - sqlx::query_as!( - SharedTask, - r#" - SELECT - id AS "id!: Uuid", - remote_project_id AS "remote_project_id!: Uuid", - title AS title, - description AS description, - status AS "status!: TaskStatus", - assignee_user_id AS "assignee_user_id: Uuid", - assignee_first_name AS "assignee_first_name: String", - assignee_last_name AS "assignee_last_name: String", - assignee_username AS "assignee_username: String", - version AS "version!: i64", - last_event_seq AS "last_event_seq: i64", - created_at AS "created_at!: DateTime<Utc>", - updated_at AS "updated_at!: DateTime<Utc>" - FROM shared_tasks - WHERE remote_project_id = $1 - ORDER BY updated_at DESC - "#, - remote_project_id - ) - .fetch_all(pool) - .await - } - - pub async fn upsert<'e, E>(executor: E, data: SharedTaskInput) -> Result<SharedTask, sqlx::Error> - where - E: Executor<'e, Database = Sqlite>, - { - let status = data.status.clone(); - sqlx::query_as!( - SharedTask, - r#" - INSERT INTO shared_tasks ( - id, - remote_project_id, - title, - description, - status, - assignee_user_id, - assignee_first_name, - assignee_last_name, - assignee_username, - version, - last_event_seq, - created_at, - updated_at - ) - VALUES ( - $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13 - ) - ON CONFLICT(id) DO UPDATE SET - remote_project_id = excluded.remote_project_id, - title = excluded.title, - description = excluded.description, - status = excluded.status, - assignee_user_id = excluded.assignee_user_id, - assignee_first_name = excluded.assignee_first_name, - assignee_last_name = excluded.assignee_last_name, - assignee_username = excluded.assignee_username, - version = excluded.version, - last_event_seq = excluded.last_event_seq, - created_at = excluded.created_at, - updated_at = excluded.updated_at - RETURNING - id AS "id!: Uuid", - remote_project_id AS "remote_project_id!: Uuid", - title AS title, - description AS description, - status AS "status!: TaskStatus", - assignee_user_id AS "assignee_user_id: Uuid", - assignee_first_name AS "assignee_first_name: String", - assignee_last_name AS "assignee_last_name: String", - assignee_username AS "assignee_username: String", - version AS "version!: i64", - last_event_seq AS "last_event_seq: i64", - created_at AS "created_at!: DateTime<Utc>", - updated_at AS "updated_at!: DateTime<Utc>" - "#, - data.id, - data.remote_project_id, - data.title, - data.description, - status, - data.assignee_user_id, - data.assignee_first_name, - data.assignee_last_name, - data.assignee_username, - data.version, - data.last_event_seq, - data.created_at, - data.updated_at - ) - .fetch_one(executor) - .await - } - - pub async fn find_by_id(pool: &SqlitePool, id: Uuid) -> Result<Option<SharedTask>, sqlx::Error> { - sqlx::query_as!( - SharedTask, - r#" - SELECT - id AS "id!: Uuid", - remote_project_id AS "remote_project_id!: Uuid", - title AS title, - description AS description, - status AS "status!: TaskStatus", - assignee_user_id AS "assignee_user_id: Uuid", - assignee_first_name AS "assignee_first_name: String", - assignee_last_name AS "assignee_last_name: String", - assignee_username AS "assignee_username: String", - version AS "version!: i64", - last_event_seq AS "last_event_seq: i64", - created_at AS "created_at!: DateTime<Utc>", - updated_at AS "updated_at!: DateTime<Utc>"
- FROM shared_tasks - WHERE id = $1 - "#, - id - ) - .fetch_optional(pool) - .await - } - - pub async fn remove<'e, E>(executor: E, id: Uuid) -> Result<(), sqlx::Error> - where - E: Executor<'e, Database = Sqlite>, - { - sqlx::query!("DELETE FROM shared_tasks WHERE id = $1", id) - .execute(executor) - .await?; - Ok(()) - } - - pub async fn remove_many<'e, E>(executor: E, ids: &[Uuid]) -> Result<(), sqlx::Error> - where - E: Executor<'e, Database = Sqlite>, - { - if ids.is_empty() { - return Ok(()); - } - - let mut builder = QueryBuilder::<Sqlite>::new("DELETE FROM shared_tasks WHERE id IN ("); - { - let mut separated = builder.separated(", "); - for id in ids { - separated.push_bind(id); - } - } - builder.push(")"); - builder.build().execute(executor).await?; - Ok(()) - } - - pub async fn find_by_rowid(pool: &SqlitePool, rowid: i64) -> Result<Option<SharedTask>, sqlx::Error> { - sqlx::query_as!( - SharedTask, - r#" - SELECT - id AS "id!: Uuid", - remote_project_id AS "remote_project_id!: Uuid", - title AS title, - description AS description, - status AS "status!: TaskStatus", - assignee_user_id AS "assignee_user_id: Uuid", - assignee_first_name AS "assignee_first_name: String", - assignee_last_name AS "assignee_last_name: String", - assignee_username AS "assignee_username: String", - version AS "version!: i64", - last_event_seq AS "last_event_seq: i64", - created_at AS "created_at!: DateTime<Utc>", - updated_at AS "updated_at!: DateTime<Utc>" - FROM shared_tasks - WHERE rowid = $1 - "#, - rowid - ) - .fetch_optional(pool) - .await - } -} - -#[derive(Debug, Clone, FromRow)] -pub struct SharedActivityCursor { - pub remote_project_id: Uuid, - pub last_seq: i64, - pub updated_at: DateTime<Utc>, -} - -impl SharedActivityCursor { - pub async fn get( - pool: &SqlitePool, - remote_project_id: Uuid, - ) -> Result<Option<SharedActivityCursor>, sqlx::Error> { - sqlx::query_as!( - SharedActivityCursor, - r#" - SELECT - remote_project_id AS "remote_project_id!: Uuid", - last_seq AS "last_seq!: i64", - updated_at AS "updated_at!: DateTime<Utc>" - FROM shared_activity_cursors - WHERE remote_project_id = $1 - "#, - remote_project_id - ) - .fetch_optional(pool) - .await - } - - pub async fn upsert<'e, E>( - executor: E, - remote_project_id: Uuid, - last_seq: i64, - ) -> Result<SharedActivityCursor, sqlx::Error> - where - E: Executor<'e, Database = Sqlite>, - { - sqlx::query_as!( - SharedActivityCursor, - r#" - INSERT INTO shared_activity_cursors ( - remote_project_id, - last_seq, - updated_at - ) - VALUES ( - $1, - $2, - datetime('now', 'subsec') - ) - ON CONFLICT(remote_project_id) DO UPDATE SET - last_seq = excluded.last_seq, - updated_at = excluded.updated_at - RETURNING - remote_project_id AS "remote_project_id!: Uuid", - last_seq AS "last_seq!: i64", - updated_at AS "updated_at!: DateTime<Utc>" - "#, - remote_project_id, - last_seq - ) - .fetch_one(executor) - .await - } -} diff --git a/crates/db/src/models/task.rs b/crates/db/src/models/task.rs index 1da63ea2..107d6831 100644 --- a/crates/db/src/models/task.rs +++ b/crates/db/src/models/task.rs @@ -12,7 +12,7 @@ use super::{project::Project, task_attempt::TaskAttempt}; )] #[sqlx(type_name = "task_status", rename_all = "lowercase")] #[serde(rename_all = "lowercase")] -#[strum(serialize_all = "kebab_case")] +#[strum(serialize_all = "lowercase")] pub enum TaskStatus { #[default] Todo,
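
Reviewer note on the hunk above: switching strum from `kebab_case` to `lowercase` lines the display form of `TaskStatus` up with the serde spelling and with the renamed Postgres enum labels in the remote `.sqlx` files below (`in-progress` → `inprogress`). A minimal sketch of the invariant, using a standalone copy of the enum (the real one also carries sqlx and ts-rs derives):

```rust
use serde::{Deserialize, Serialize};
use strum::Display;

// Illustrative copy of TaskStatus, reduced to the derives that matter here.
#[derive(Serialize, Deserialize, Display)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
enum TaskStatus {
    Todo,
    InProgress,
    InReview,
    Done,
    Cancelled,
}

fn main() {
    // Both spellings now agree on "inprogress" (strum previously said "in-progress").
    assert_eq!(TaskStatus::InProgress.to_string(), "inprogress");
    assert_eq!(
        serde_json::to_string(&TaskStatus::InProgress).unwrap(),
        "\"inprogress\""
    );
}
```
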
@@ -113,15 +113,6 @@ impl CreateTask { } } -#[derive(Debug, Clone)] -pub struct SyncTask { - pub shared_task_id: Uuid, - pub project_id: Uuid, - pub title: String, - pub description: Option<String>, - pub status: TaskStatus, -} - #[derive(Debug, Serialize, Deserialize, TS)] pub struct UpdateTask { pub title: Option<String>, @@ -273,7 +264,7 @@ ORDER BY t.created_at DESC"#, sqlx::query_as!( Task, r#"SELECT id as "id!: Uuid", project_id as "project_id!: Uuid", title, description, status as "status!: TaskStatus", parent_task_attempt as "parent_task_attempt: Uuid", shared_task_id as "shared_task_id: Uuid", created_at as "created_at!: DateTime<Utc>", updated_at as "updated_at!: DateTime<Utc>" - FROM tasks + FROM tasks WHERE shared_task_id = $1 LIMIT 1"#, shared_task_id @@ -282,6 +273,17 @@ ORDER BY t.created_at DESC"#, .await } + pub async fn find_all_shared(pool: &SqlitePool) -> Result<Vec<Task>, sqlx::Error> { + sqlx::query_as!( + Task, + r#"SELECT id as "id!: Uuid", project_id as "project_id!: Uuid", title, description, status as "status!: TaskStatus", parent_task_attempt as "parent_task_attempt: Uuid", shared_task_id as "shared_task_id: Uuid", created_at as "created_at!: DateTime<Utc>", updated_at as "updated_at!: DateTime<Utc>" + FROM tasks + WHERE shared_task_id IS NOT NULL"# + ) + .fetch_all(pool) + .await + } + pub async fn create( pool: &SqlitePool, data: &CreateTask, @@ -331,58 +333,6 @@ ORDER BY t.created_at DESC"#, .await } - pub async fn sync_from_shared_task<'e, E>( - executor: E, - data: SyncTask, - create_if_not_exists: bool, - ) -> Result<bool, sqlx::Error> - where - E: Executor<'e, Database = Sqlite>, - { - let new_task_id = Uuid::new_v4(); - - let result = sqlx::query!( - r#" - INSERT INTO tasks ( - id, - project_id, - title, - description, - status, - shared_task_id - ) - SELECT - $1, - $2, - $3, - $4, - $5, - $6 - WHERE $7 - OR EXISTS ( - SELECT 1 FROM tasks WHERE shared_task_id = $6 - ) - ON CONFLICT(shared_task_id) WHERE shared_task_id IS NOT NULL DO UPDATE SET - project_id = excluded.project_id, - title = excluded.title, - description = excluded.description, - status = excluded.status, - updated_at = datetime('now', 'subsec') - "#, - new_task_id, - data.project_id, - data.title, - data.description, - data.status, - data.shared_task_id, - create_if_not_exists - ) - .execute(executor) - .await?; - - Ok(result.rows_affected() > 0) - } - pub async fn update_status( pool: &SqlitePool, id: Uuid, @@ -428,8 +378,8 @@ ORDER BY t.created_at DESC"#, let result = sqlx::query!( r#"UPDATE tasks SET shared_task_id = NULL - WHERE shared_task_id IN ( - SELECT id FROM shared_tasks WHERE remote_project_id = $1 + WHERE project_id IN ( + SELECT id FROM projects WHERE remote_project_id = $1 )"#, remote_project_id ) @@ -466,6 +416,31 @@ ORDER BY t.created_at DESC"#, Ok(()) } + pub async fn batch_unlink_shared_tasks<'e, E>( + executor: E, + shared_task_ids: &[Uuid], + ) -> Result<u64, sqlx::Error> + where + E: Executor<'e, Database = Sqlite>, + { + if shared_task_ids.is_empty() { + return Ok(0); + } + + let mut query_builder = sqlx::QueryBuilder::new( + "UPDATE tasks SET shared_task_id = NULL, updated_at = CURRENT_TIMESTAMP WHERE shared_task_id IN (", + ); + + let mut separated = query_builder.separated(", "); + for id in shared_task_ids { + separated.push_bind(id); + } + separated.push_unseparated(")"); + + let result = query_builder.build().execute(executor).await?; + Ok(result.rows_affected()) + } + pub async fn exists( pool: &SqlitePool, id: Uuid,
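
Reviewer note on `batch_unlink_shared_tasks` above: `QueryBuilder` keeps the variable-length `IN (…)` list fully parameterized, one bind per id. A hypothetical caller, for illustration only — the pool, the transaction handling, and the id list are assumptions, not code from this PR:

```rust
use db::models::task::Task; // module path taken from this diff
use sqlx::SqlitePool;
use uuid::Uuid;

// Sketch: unlink local tasks whose shared counterparts disappeared
// upstream, inside a single transaction.
async fn prune_stale_links(pool: &SqlitePool, gone: &[Uuid]) -> Result<(), sqlx::Error> {
    let mut tx = pool.begin().await?;
    // `&mut *tx` borrows the underlying connection as the Executor.
    let unlinked = Task::batch_unlink_shared_tasks(&mut *tx, gone).await?;
    tx.commit().await?;
    tracing::debug!("unlinked {unlinked} local tasks");
    Ok(())
}
```
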
diff --git a/crates/deployment/src/lib.rs b/crates/deployment/src/lib.rs index 47b993d3..1df14453 100644 --- a/crates/deployment/src/lib.rs +++ b/crates/deployment/src/lib.rs @@ -28,12 +28,12 @@ use services::services::{ image::{ImageError, ImageService}, pr_monitor::PrMonitorService, queued_message::QueuedMessageService, - share::{RemoteSync, RemoteSyncHandle, ShareConfig, SharePublisher}, + share::SharePublisher, worktree_manager::WorktreeError, }; use sqlx::{Error as SqlxError, types::Uuid}; use thiserror::Error; -use tokio::sync::{Mutex, RwLock}; +use tokio::sync::RwLock; use utils::sentry as sentry_utils; #[derive(Debug, Clone, Copy, Error)] @@ -106,26 +106,6 @@ pub trait Deployment: Clone + Send + Sync + 'static { fn share_publisher(&self) -> Result<SharePublisher, DeploymentError>; - fn share_sync_handle(&self) -> &Arc<Mutex<Option<RemoteSyncHandle>>>; - - fn spawn_remote_sync(&self, config: ShareConfig) { - let deployment = self.clone(); - let handle_slot = self.share_sync_handle().clone(); - tokio::spawn(async move { - tracing::info!("Starting shared task sync"); - - let remote_sync_handle = RemoteSync::spawn( - deployment.db().clone(), - config, - deployment.auth_context().clone(), - ); - { - let mut guard = handle_slot.lock().await; - *guard = Some(remote_sync_handle); - } - }); - } - async fn update_sentry_scope(&self) -> Result<(), DeploymentError> { let user_id = self.user_id(); let config = self.config().read().await; diff --git a/crates/local-deployment/Cargo.toml b/crates/local-deployment/Cargo.toml index f22771b5..9f178916 100644 --- a/crates/local-deployment/Cargo.toml +++ b/crates/local-deployment/Cargo.toml @@ -26,6 +26,5 @@ notify-debouncer-full = "0.5.0" reqwest = { version = "0.12", features = ["json"] } sentry = { version = "0.41.0", features = ["anyhow", "backtrace", "panic", "debug-images"] } futures = "0.3" -async-stream = "0.3" json-patch = "2.0" tokio = { workspace = true } diff --git a/crates/local-deployment/src/lib.rs b/crates/local-deployment/src/lib.rs index 201b68bf..d4b7af93 100644 --- a/crates/local-deployment/src/lib.rs +++ b/crates/local-deployment/src/lib.rs @@ -18,9 +18,9 @@ use services::services::{ oauth_credentials::OAuthCredentials, queued_message::QueuedMessageService, remote_client::{RemoteClient, RemoteClientError}, - share::{RemoteSyncHandle, ShareConfig, SharePublisher}, + share::{ShareConfig, SharePublisher}, }; -use tokio::sync::{Mutex, RwLock}; +use tokio::sync::RwLock; use utils::{ api::oauth::LoginStatus, assets::{config_path, credentials_path}, @@ -47,7 +47,6 @@ pub struct LocalDeployment { approvals: Approvals, queued_message_service: QueuedMessageService, share_publisher: Result<SharePublisher, DeploymentError>, - share_sync_handle: Arc<Mutex<Option<RemoteSyncHandle>>>, share_config: Option<ShareConfig>, remote_client: Result<RemoteClient, RemoteClientError>, auth_context: AuthContext, @@ -159,14 +158,6 @@ impl Deployment for LocalDeployment { .map_err(|e| *e); let oauth_handoffs = Arc::new(RwLock::new(HashMap::new())); - let share_sync_handle = Arc::new(Mutex::new(None)); - - let mut share_sync_config: Option<ShareConfig> = None; - if let (Some(sc_ref), Ok(_)) = (share_config.as_ref(), &share_publisher) - && oauth_credentials.get().await.is_some() - { - share_sync_config = Some(sc_ref.clone()); - } // We need to make analytics accessible to the ContainerService // TODO: Handle this more gracefully @@ -205,17 +196,12 @@ approvals, queued_message_service, share_publisher, - share_sync_handle: share_sync_handle.clone(), share_config: share_config.clone(), remote_client, auth_context, oauth_handoffs, }; - if let Some(sc) = share_sync_config { - deployment.spawn_remote_sync(sc); - } - Ok(deployment) } @@ -271,10 +257,6 @@ self.share_publisher.clone() } - fn share_sync_handle(&self) -> &Arc<Mutex<Option<RemoteSyncHandle>>> { - &self.share_sync_handle - } - fn auth_context(&self) -> &AuthContext { &self.auth_context } diff --git a/crates/remote/.sqlx/query-e185c68e4809dddb5dd1e59f1cb123c4e02499d42d97df65fc7a625568d4d234.json
b/crates/remote/.sqlx/query-1a8fb6c222b7eb3077fba6a7722faa1af89e268a644e7e7237ae21b03221dc9b.json similarity index 60% rename from crates/remote/.sqlx/query-e185c68e4809dddb5dd1e59f1cb123c4e02499d42d97df65fc7a625568d4d234.json rename to crates/remote/.sqlx/query-1a8fb6c222b7eb3077fba6a7722faa1af89e268a644e7e7237ae21b03221dc9b.json index 5974a2f4..7dd2a3b2 100644 --- a/crates/remote/.sqlx/query-e185c68e4809dddb5dd1e59f1cb123c4e02499d42d97df65fc7a625568d4d234.json +++ b/crates/remote/.sqlx/query-1a8fb6c222b7eb3077fba6a7722faa1af89e268a644e7e7237ae21b03221dc9b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE shared_tasks AS t\n SET deleted_at = NOW(),\n deleted_by_user_id = $3,\n version = t.version + 1\n WHERE t.id = $1\n AND t.version = COALESCE($2, t.version)\n AND t.assignee_user_id = $3\n AND t.deleted_at IS NULL\n RETURNING\n t.id AS \"id!\",\n t.organization_id AS \"organization_id!: Uuid\",\n t.project_id AS \"project_id!\",\n t.creator_user_id AS \"creator_user_id?: Uuid\",\n t.assignee_user_id AS \"assignee_user_id?: Uuid\",\n t.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n t.title AS \"title!\",\n t.description AS \"description?\",\n t.status AS \"status!: TaskStatus\",\n t.version AS \"version!\",\n t.deleted_at AS \"deleted_at?\",\n t.shared_at AS \"shared_at?\",\n t.created_at AS \"created_at!\",\n t.updated_at AS \"updated_at!\"\n ", + "query": "\n UPDATE shared_tasks AS t\n SET deleted_at = NOW(),\n deleted_by_user_id = $2\n WHERE t.id = $1\n AND t.assignee_user_id = $2\n AND t.deleted_at IS NULL\n RETURNING\n t.id AS \"id!\",\n t.organization_id AS \"organization_id!: Uuid\",\n t.project_id AS \"project_id!\",\n t.creator_user_id AS \"creator_user_id?: Uuid\",\n t.assignee_user_id AS \"assignee_user_id?: Uuid\",\n t.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n t.title AS \"title!\",\n t.description AS \"description?\",\n t.status AS \"status!: TaskStatus\",\n t.deleted_at AS \"deleted_at?\",\n t.shared_at AS \"shared_at?\",\n t.created_at AS \"created_at!\",\n t.updated_at AS \"updated_at!\"\n ", "describe": { "columns": [ { @@ -52,8 +52,8 @@ "kind": { "Enum": [ "todo", - "in-progress", - "in-review", + "inprogress", + "inreview", "done", "cancelled" ] @@ -63,26 +63,21 @@ }, { "ordinal": 9, - "name": "version!", - "type_info": "Int8" - }, - { - "ordinal": 10, "name": "deleted_at?", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 10, "name": "shared_at?", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 11, "name": "created_at!", "type_info": "Timestamptz" }, { - "ordinal": 13, + "ordinal": 12, "name": "updated_at!", "type_info": "Timestamptz" } @@ -90,7 +85,6 @@ "parameters": { "Left": [ "Uuid", - "Int8", "Uuid" ] }, @@ -104,12 +98,11 @@ false, true, false, - false, true, true, false, false ] }, - "hash": "e185c68e4809dddb5dd1e59f1cb123c4e02499d42d97df65fc7a625568d4d234" + "hash": "1a8fb6c222b7eb3077fba6a7722faa1af89e268a644e7e7237ae21b03221dc9b" } diff --git a/crates/remote/.sqlx/query-1d691b943af2d90feaace911403fbb158839b4359f91fd5c05166ecee82b13a8.json b/crates/remote/.sqlx/query-338507619ddbadce5d40bc58a7d9eb95bbeee3ade4d5abb9140aefe5673ea071.json similarity index 63% rename from crates/remote/.sqlx/query-1d691b943af2d90feaace911403fbb158839b4359f91fd5c05166ecee82b13a8.json rename to crates/remote/.sqlx/query-338507619ddbadce5d40bc58a7d9eb95bbeee3ade4d5abb9140aefe5673ea071.json index 2161ca6d..9039c90d 100644 --- 
a/crates/remote/.sqlx/query-1d691b943af2d90feaace911403fbb158839b4359f91fd5c05166ecee82b13a8.json +++ b/crates/remote/.sqlx/query-338507619ddbadce5d40bc58a7d9eb95bbeee3ade4d5abb9140aefe5673ea071.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE shared_tasks AS t\n SET title = COALESCE($2, t.title),\n description = COALESCE($3, t.description),\n status = COALESCE($4, t.status),\n version = t.version + 1,\n updated_at = NOW()\n WHERE t.id = $1\n AND t.version = COALESCE($5, t.version)\n AND t.assignee_user_id = $6\n AND t.deleted_at IS NULL\n RETURNING\n t.id AS \"id!\",\n t.organization_id AS \"organization_id!: Uuid\",\n t.project_id AS \"project_id!\",\n t.creator_user_id AS \"creator_user_id?: Uuid\",\n t.assignee_user_id AS \"assignee_user_id?: Uuid\",\n t.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n t.title AS \"title!\",\n t.description AS \"description?\",\n t.status AS \"status!: TaskStatus\",\n t.version AS \"version!\",\n t.deleted_at AS \"deleted_at?\",\n t.shared_at AS \"shared_at?\",\n t.created_at AS \"created_at!\",\n t.updated_at AS \"updated_at!\"\n ", + "query": "\n UPDATE shared_tasks AS t\n SET title = COALESCE($2, t.title),\n description = COALESCE($3, t.description),\n status = COALESCE($4, t.status),\n updated_at = NOW()\n WHERE t.id = $1\n AND t.assignee_user_id = $5\n AND t.deleted_at IS NULL\n RETURNING\n t.id AS \"id!\",\n t.organization_id AS \"organization_id!: Uuid\",\n t.project_id AS \"project_id!\",\n t.creator_user_id AS \"creator_user_id?: Uuid\",\n t.assignee_user_id AS \"assignee_user_id?: Uuid\",\n t.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n t.title AS \"title!\",\n t.description AS \"description?\",\n t.status AS \"status!: TaskStatus\",\n t.deleted_at AS \"deleted_at?\",\n t.shared_at AS \"shared_at?\",\n t.created_at AS \"created_at!\",\n t.updated_at AS \"updated_at!\"\n ", "describe": { "columns": [ { @@ -52,8 +52,8 @@ "kind": { "Enum": [ "todo", - "in-progress", - "in-review", + "inprogress", + "inreview", "done", "cancelled" ] @@ -63,26 +63,21 @@ }, { "ordinal": 9, - "name": "version!", - "type_info": "Int8" - }, - { - "ordinal": 10, "name": "deleted_at?", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 10, "name": "shared_at?", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 11, "name": "created_at!", "type_info": "Timestamptz" }, { - "ordinal": 13, + "ordinal": 12, "name": "updated_at!", "type_info": "Timestamptz" } @@ -98,15 +93,14 @@ "kind": { "Enum": [ "todo", - "in-progress", - "in-review", + "inprogress", + "inreview", "done", "cancelled" ] } } }, - "Int8", "Uuid" ] }, @@ -120,12 +114,11 @@ false, true, false, - false, true, true, false, false ] }, - "hash": "1d691b943af2d90feaace911403fbb158839b4359f91fd5c05166ecee82b13a8" + "hash": "338507619ddbadce5d40bc58a7d9eb95bbeee3ade4d5abb9140aefe5673ea071" } diff --git a/crates/remote/.sqlx/query-2a9a7c649ededf8772f750bb42c5144f4ab5e74dc905fb8a63340f09fd55a3d7.json b/crates/remote/.sqlx/query-3ba7efc786500c8a72dec5fb0f76b66da861b8ca8905080ef70a16943e97f004.json similarity index 79% rename from crates/remote/.sqlx/query-2a9a7c649ededf8772f750bb42c5144f4ab5e74dc905fb8a63340f09fd55a3d7.json rename to crates/remote/.sqlx/query-3ba7efc786500c8a72dec5fb0f76b66da861b8ca8905080ef70a16943e97f004.json index 634990e3..9515abc6 100644 --- a/crates/remote/.sqlx/query-2a9a7c649ededf8772f750bb42c5144f4ab5e74dc905fb8a63340f09fd55a3d7.json +++ b/crates/remote/.sqlx/query-3ba7efc786500c8a72dec5fb0f76b66da861b8ca8905080ef70a16943e97f004.json @@ 
-1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n id AS \"id!\",\n organization_id AS \"organization_id!: Uuid\",\n project_id AS \"project_id!\",\n creator_user_id AS \"creator_user_id?: Uuid\",\n assignee_user_id AS \"assignee_user_id?: Uuid\",\n deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n title AS \"title!\",\n description AS \"description?\",\n status AS \"status!: TaskStatus\",\n version AS \"version!\",\n deleted_at AS \"deleted_at?\",\n shared_at AS \"shared_at?\",\n created_at AS \"created_at!\",\n updated_at AS \"updated_at!\"\n FROM shared_tasks\n WHERE id = $1\n AND deleted_at IS NULL\n ", + "query": "\n SELECT\n id AS \"id!\",\n organization_id AS \"organization_id!: Uuid\",\n project_id AS \"project_id!\",\n creator_user_id AS \"creator_user_id?: Uuid\",\n assignee_user_id AS \"assignee_user_id?: Uuid\",\n deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n title AS \"title!\",\n description AS \"description?\",\n status AS \"status!: TaskStatus\",\n deleted_at AS \"deleted_at?\",\n shared_at AS \"shared_at?\",\n created_at AS \"created_at!\",\n updated_at AS \"updated_at!\"\n FROM shared_tasks\n WHERE id = $1\n AND deleted_at IS NULL\n ", "describe": { "columns": [ { @@ -52,8 +52,8 @@ "kind": { "Enum": [ "todo", - "in-progress", - "in-review", + "inprogress", + "inreview", "done", "cancelled" ] @@ -63,26 +63,21 @@ }, { "ordinal": 9, - "name": "version!", - "type_info": "Int8" - }, - { - "ordinal": 10, "name": "deleted_at?", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 10, "name": "shared_at?", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 11, "name": "created_at!", "type_info": "Timestamptz" }, { - "ordinal": 13, + "ordinal": 12, "name": "updated_at!", "type_info": "Timestamptz" } @@ -102,12 +97,11 @@ false, true, false, - false, true, true, false, false ] }, - "hash": "2a9a7c649ededf8772f750bb42c5144f4ab5e74dc905fb8a63340f09fd55a3d7" + "hash": "3ba7efc786500c8a72dec5fb0f76b66da861b8ca8905080ef70a16943e97f004" } diff --git a/crates/remote/.sqlx/query-4153afb5c59d76df7c880d2f427cdba11d2eaf2fe26193043947a45bcda46f45.json b/crates/remote/.sqlx/query-4153afb5c59d76df7c880d2f427cdba11d2eaf2fe26193043947a45bcda46f45.json deleted file mode 100644 index 4e29553a..00000000 --- a/crates/remote/.sqlx/query-4153afb5c59d76df7c880d2f427cdba11d2eaf2fe26193043947a45bcda46f45.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT st.id AS \"id!: Uuid\"\n FROM shared_tasks st\n WHERE st.project_id = $1\n AND st.deleted_at IS NOT NULL\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!: Uuid", - "type_info": "Uuid" - } - ], - "parameters": { - "Left": [ - "Uuid" - ] - }, - "nullable": [ - false - ] - }, - "hash": "4153afb5c59d76df7c880d2f427cdba11d2eaf2fe26193043947a45bcda46f45" -} diff --git a/crates/remote/.sqlx/query-4aaf14d8e25078fff3ceca2b2b1e2888403f398fba3048fbc582ec24c4c5dbf7.json b/crates/remote/.sqlx/query-4aaf14d8e25078fff3ceca2b2b1e2888403f398fba3048fbc582ec24c4c5dbf7.json new file mode 100644 index 00000000..30368939 --- /dev/null +++ b/crates/remote/.sqlx/query-4aaf14d8e25078fff3ceca2b2b1e2888403f398fba3048fbc582ec24c4c5dbf7.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT DISTINCT\n u.id as \"user_id\",\n u.first_name as \"first_name\",\n u.last_name as \"last_name\",\n u.username as \"username\"\n FROM shared_tasks st\n INNER JOIN users u ON u.id = st.assignee_user_id\n WHERE st.project_id = $1\n AND st.assignee_user_id IS NOT NULL\n ", + 
"describe": { + "columns": [ + { + "ordinal": 0, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "first_name", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "last_name", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "username", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + true, + true, + true + ] + }, + "hash": "4aaf14d8e25078fff3ceca2b2b1e2888403f398fba3048fbc582ec24c4c5dbf7" +} diff --git a/crates/remote/.sqlx/query-814e3c0507a86c04008e08104176c3c552833f518b2e880e649ad7fc10c0721c.json b/crates/remote/.sqlx/query-814e3c0507a86c04008e08104176c3c552833f518b2e880e649ad7fc10c0721c.json deleted file mode 100644 index b5d63c22..00000000 --- a/crates/remote/.sqlx/query-814e3c0507a86c04008e08104176c3c552833f518b2e880e649ad7fc10c0721c.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH next AS (\n INSERT INTO project_activity_counters AS counters (project_id, last_seq)\n VALUES ($1, 1)\n ON CONFLICT (project_id)\n DO UPDATE SET last_seq = counters.last_seq + 1\n RETURNING last_seq\n )\n INSERT INTO activity (\n project_id,\n seq,\n assignee_user_id,\n event_type,\n payload\n )\n SELECT $1, next.last_seq, $2, $3, $4\n FROM next\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Uuid", - "Uuid", - "Text", - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "814e3c0507a86c04008e08104176c3c552833f518b2e880e649ad7fc10c0721c" -} diff --git a/crates/remote/.sqlx/query-872d77e34d06bc036a07e9b2330166a2e0bedf34db5bceb3e6e576f1e07f6414.json b/crates/remote/.sqlx/query-872d77e34d06bc036a07e9b2330166a2e0bedf34db5bceb3e6e576f1e07f6414.json new file mode 100644 index 00000000..27647ac4 --- /dev/null +++ b/crates/remote/.sqlx/query-872d77e34d06bc036a07e9b2330166a2e0bedf34db5bceb3e6e576f1e07f6414.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT t.id\n FROM shared_tasks t\n INNER JOIN organization_member_metadata om ON t.organization_id = om.organization_id\n WHERE t.id = ANY($1)\n AND t.deleted_at IS NULL\n AND om.user_id = $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": [ + "UuidArray", + "Uuid" + ] + }, + "nullable": [ + false + ] + }, + "hash": "872d77e34d06bc036a07e9b2330166a2e0bedf34db5bceb3e6e576f1e07f6414" +} diff --git a/crates/remote/.sqlx/query-a0fef73e10f2f7bba67f740aef62e43fb8e4678833be58e361d7b90912fa9883.json b/crates/remote/.sqlx/query-a0fef73e10f2f7bba67f740aef62e43fb8e4678833be58e361d7b90912fa9883.json new file mode 100644 index 00000000..a09995f6 --- /dev/null +++ b/crates/remote/.sqlx/query-a0fef73e10f2f7bba67f740aef62e43fb8e4678833be58e361d7b90912fa9883.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT 1 AS v FROM shared_tasks WHERE \"organization_id\" = ANY($1)", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "v", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "UuidArray" + ] + }, + "nullable": [ + null + ] + }, + "hash": "a0fef73e10f2f7bba67f740aef62e43fb8e4678833be58e361d7b90912fa9883" +} diff --git a/crates/remote/.sqlx/query-ae5afb54ca4316801148a697d31965c714f87b84840d93195443fa1df9375543.json b/crates/remote/.sqlx/query-ae5afb54ca4316801148a697d31965c714f87b84840d93195443fa1df9375543.json deleted file mode 100644 index 66c958a5..00000000 --- a/crates/remote/.sqlx/query-ae5afb54ca4316801148a697d31965c714f87b84840d93195443fa1df9375543.json +++ /dev/null @@ -1,22 +0,0 
@@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT pg_try_advisory_lock(hashtextextended($1, 0))\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "pg_try_advisory_lock", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - null - ] - }, - "hash": "ae5afb54ca4316801148a697d31965c714f87b84840d93195443fa1df9375543" -} diff --git a/crates/remote/.sqlx/query-97132a5a3f0c0f9ca404d8517dd77a3e55a6933d8b7afad5296d9a63ec43d1e0.json b/crates/remote/.sqlx/query-af1c9ee18bd6dffa6e2b46959690ba0a1d1d545fea0b643e591b250a7160aa47.json similarity index 59% rename from crates/remote/.sqlx/query-97132a5a3f0c0f9ca404d8517dd77a3e55a6933d8b7afad5296d9a63ec43d1e0.json rename to crates/remote/.sqlx/query-af1c9ee18bd6dffa6e2b46959690ba0a1d1d545fea0b643e591b250a7160aa47.json index c23c2dbe..a6b5675d 100644 --- a/crates/remote/.sqlx/query-97132a5a3f0c0f9ca404d8517dd77a3e55a6933d8b7afad5296d9a63ec43d1e0.json +++ b/crates/remote/.sqlx/query-af1c9ee18bd6dffa6e2b46959690ba0a1d1d545fea0b643e591b250a7160aa47.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE shared_tasks AS t\n SET assignee_user_id = $2,\n version = t.version + 1\n WHERE t.id = $1\n AND t.version = COALESCE($4, t.version)\n AND ($3::uuid IS NULL OR t.assignee_user_id = $3::uuid)\n AND t.deleted_at IS NULL\n RETURNING\n t.id AS \"id!\",\n t.organization_id AS \"organization_id!: Uuid\",\n t.project_id AS \"project_id!\",\n t.creator_user_id AS \"creator_user_id?: Uuid\",\n t.assignee_user_id AS \"assignee_user_id?: Uuid\",\n t.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n t.title AS \"title!\",\n t.description AS \"description?\",\n t.status AS \"status!: TaskStatus\",\n t.version AS \"version!\",\n t.deleted_at AS \"deleted_at?\",\n t.shared_at AS \"shared_at?\",\n t.created_at AS \"created_at!\",\n t.updated_at AS \"updated_at!\"\n ", + "query": "\n UPDATE shared_tasks AS t\n SET assignee_user_id = $2\n WHERE t.id = $1\n AND ($3::uuid IS NULL OR t.assignee_user_id = $3::uuid)\n AND t.deleted_at IS NULL\n RETURNING\n t.id AS \"id!\",\n t.organization_id AS \"organization_id!: Uuid\",\n t.project_id AS \"project_id!\",\n t.creator_user_id AS \"creator_user_id?: Uuid\",\n t.assignee_user_id AS \"assignee_user_id?: Uuid\",\n t.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n t.title AS \"title!\",\n t.description AS \"description?\",\n t.status AS \"status!: TaskStatus\",\n t.deleted_at AS \"deleted_at?\",\n t.shared_at AS \"shared_at?\",\n t.created_at AS \"created_at!\",\n t.updated_at AS \"updated_at!\"\n ", "describe": { "columns": [ { @@ -52,8 +52,8 @@ "kind": { "Enum": [ "todo", - "in-progress", - "in-review", + "inprogress", + "inreview", "done", "cancelled" ] @@ -63,26 +63,21 @@ }, { "ordinal": 9, - "name": "version!", - "type_info": "Int8" - }, - { - "ordinal": 10, "name": "deleted_at?", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 10, "name": "shared_at?", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 11, "name": "created_at!", "type_info": "Timestamptz" }, { - "ordinal": 13, + "ordinal": 12, "name": "updated_at!", "type_info": "Timestamptz" } @@ -91,8 +86,7 @@ "Left": [ "Uuid", "Uuid", - "Uuid", - "Int8" + "Uuid" ] }, "nullable": [ @@ -105,12 +99,11 @@ false, true, false, - false, true, true, false, false ] }, - "hash": "97132a5a3f0c0f9ca404d8517dd77a3e55a6933d8b7afad5296d9a63ec43d1e0" + "hash": "af1c9ee18bd6dffa6e2b46959690ba0a1d1d545fea0b643e591b250a7160aa47" } diff --git 
a/crates/remote/.sqlx/query-ba222a6989447b36de700fa211af240fcf59603cf2bf50eb8c2be8a37fcfc565.json b/crates/remote/.sqlx/query-ba222a6989447b36de700fa211af240fcf59603cf2bf50eb8c2be8a37fcfc565.json deleted file mode 100644 index 10d47488..00000000 --- a/crates/remote/.sqlx/query-ba222a6989447b36de700fa211af240fcf59603cf2bf50eb8c2be8a37fcfc565.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT MAX(seq)\n FROM activity\n WHERE project_id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "max", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Uuid" - ] - }, - "nullable": [ - null - ] - }, - "hash": "ba222a6989447b36de700fa211af240fcf59603cf2bf50eb8c2be8a37fcfc565" -} diff --git a/crates/remote/.sqlx/query-c8aa60c6bfbdc7c471fec520a958d6718bc60876a28b92b49fe11169b23c2966.json b/crates/remote/.sqlx/query-c8aa60c6bfbdc7c471fec520a958d6718bc60876a28b92b49fe11169b23c2966.json deleted file mode 100644 index 0bf3aef6..00000000 --- a/crates/remote/.sqlx/query-c8aa60c6bfbdc7c471fec520a958d6718bc60876a28b92b49fe11169b23c2966.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT pg_advisory_unlock(hashtextextended($1, 0))\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "pg_advisory_unlock", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - null - ] - }, - "hash": "c8aa60c6bfbdc7c471fec520a958d6718bc60876a28b92b49fe11169b23c2966" -} diff --git a/crates/remote/.sqlx/query-13b1cf3d350af65f983aeab1e8c43faf3edc10c6403279f8450f2f9ae835cc18.json b/crates/remote/.sqlx/query-daa9b8b4b2d30296fc3c46fd25ba9e067577216bb58d6f75c6329ac7bcbb2fc8.json similarity index 82% rename from crates/remote/.sqlx/query-13b1cf3d350af65f983aeab1e8c43faf3edc10c6403279f8450f2f9ae835cc18.json rename to crates/remote/.sqlx/query-daa9b8b4b2d30296fc3c46fd25ba9e067577216bb58d6f75c6329ac7bcbb2fc8.json index 4191c30d..724444ae 100644 --- a/crates/remote/.sqlx/query-13b1cf3d350af65f983aeab1e8c43faf3edc10c6403279f8450f2f9ae835cc18.json +++ b/crates/remote/.sqlx/query-daa9b8b4b2d30296fc3c46fd25ba9e067577216bb58d6f75c6329ac7bcbb2fc8.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO shared_tasks (\n organization_id,\n project_id,\n creator_user_id,\n assignee_user_id,\n title,\n description,\n shared_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, NOW())\n RETURNING id AS \"id!\",\n organization_id AS \"organization_id!: Uuid\",\n project_id AS \"project_id!\",\n creator_user_id AS \"creator_user_id?: Uuid\",\n assignee_user_id AS \"assignee_user_id?: Uuid\",\n deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n title AS \"title!\",\n description AS \"description?\",\n status AS \"status!: TaskStatus\",\n version AS \"version!\",\n deleted_at AS \"deleted_at?\",\n shared_at AS \"shared_at?\",\n created_at AS \"created_at!\",\n updated_at AS \"updated_at!\"\n ", + "query": "\n INSERT INTO shared_tasks (\n organization_id,\n project_id,\n creator_user_id,\n assignee_user_id,\n title,\n description,\n shared_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, NOW())\n RETURNING id AS \"id!\",\n organization_id AS \"organization_id!: Uuid\",\n project_id AS \"project_id!\",\n creator_user_id AS \"creator_user_id?: Uuid\",\n assignee_user_id AS \"assignee_user_id?: Uuid\",\n deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n title AS \"title!\",\n description AS \"description?\",\n status AS \"status!: TaskStatus\",\n deleted_at AS \"deleted_at?\",\n shared_at AS 
\"shared_at?\",\n created_at AS \"created_at!\",\n updated_at AS \"updated_at!\"\n ", "describe": { "columns": [ { @@ -52,8 +52,8 @@ "kind": { "Enum": [ "todo", - "in-progress", - "in-review", + "inprogress", + "inreview", "done", "cancelled" ] @@ -63,26 +63,21 @@ }, { "ordinal": 9, - "name": "version!", - "type_info": "Int8" - }, - { - "ordinal": 10, "name": "deleted_at?", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 10, "name": "shared_at?", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 11, "name": "created_at!", "type_info": "Timestamptz" }, { - "ordinal": 13, + "ordinal": 12, "name": "updated_at!", "type_info": "Timestamptz" } @@ -107,12 +102,11 @@ false, true, false, - false, true, true, false, false ] }, - "hash": "13b1cf3d350af65f983aeab1e8c43faf3edc10c6403279f8450f2f9ae835cc18" + "hash": "daa9b8b4b2d30296fc3c46fd25ba9e067577216bb58d6f75c6329ac7bcbb2fc8" } diff --git a/crates/remote/.sqlx/query-fe740e5984676e9bdbdd36e9f090b00b952a31f89ae649046f3d97a9fa4913bf.json b/crates/remote/.sqlx/query-fe740e5984676e9bdbdd36e9f090b00b952a31f89ae649046f3d97a9fa4913bf.json deleted file mode 100644 index bedb1a1f..00000000 --- a/crates/remote/.sqlx/query-fe740e5984676e9bdbdd36e9f090b00b952a31f89ae649046f3d97a9fa4913bf.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT format('%I.%I', n.nspname, c.relname) AS qualified_name,\n split_part(\n split_part(pg_get_expr(c.relpartbound, c.oid), ' TO (''', 2),\n ''')', 1\n )::timestamptz AS upper_bound\n FROM pg_partition_tree('activity') pt\n JOIN pg_class c ON c.oid = pt.relid\n JOIN pg_namespace n ON n.oid = c.relnamespace\n WHERE pt.isleaf\n AND c.relname ~ '^activity_p_\\d{8}$'\n AND split_part(\n split_part(pg_get_expr(c.relpartbound, c.oid), ' TO (''', 2),\n ''')', 1\n )::timestamptz <= NOW() - INTERVAL '2 days'\n ORDER BY upper_bound\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "qualified_name", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "upper_bound", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - null - ] - }, - "hash": "fe740e5984676e9bdbdd36e9f090b00b952a31f89ae649046f3d97a9fa4913bf" -} diff --git a/crates/remote/.sqlx/query-ff9b35a31210dbddd237f4234bec1411b5aa1b0be986fbe5a8ee21e6771222f2.json b/crates/remote/.sqlx/query-ff9b35a31210dbddd237f4234bec1411b5aa1b0be986fbe5a8ee21e6771222f2.json deleted file mode 100644 index c20ad5eb..00000000 --- a/crates/remote/.sqlx/query-ff9b35a31210dbddd237f4234bec1411b5aa1b0be986fbe5a8ee21e6771222f2.json +++ /dev/null @@ -1,137 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n st.id AS \"id!: Uuid\",\n st.organization_id AS \"organization_id!: Uuid\",\n st.project_id AS \"project_id!: Uuid\",\n st.creator_user_id AS \"creator_user_id?: Uuid\",\n st.assignee_user_id AS \"assignee_user_id?: Uuid\",\n st.deleted_by_user_id AS \"deleted_by_user_id?: Uuid\",\n st.title AS \"title!\",\n st.description AS \"description?\",\n st.status AS \"status!: TaskStatus\",\n st.version AS \"version!\",\n st.deleted_at AS \"deleted_at?\",\n st.shared_at AS \"shared_at?\",\n st.created_at AS \"created_at!\",\n st.updated_at AS \"updated_at!\",\n u.id AS \"user_id?: Uuid\",\n u.first_name AS \"user_first_name?\",\n u.last_name AS \"user_last_name?\",\n u.username AS \"user_username?\"\n FROM shared_tasks st\n LEFT JOIN users u ON st.assignee_user_id = u.id\n WHERE st.project_id = $1\n AND st.deleted_at IS NULL\n ORDER BY st.updated_at DESC\n ", - "describe": { - 
"columns": [ - { - "ordinal": 0, - "name": "id!: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 1, - "name": "organization_id!: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 2, - "name": "project_id!: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 3, - "name": "creator_user_id?: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 4, - "name": "assignee_user_id?: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 5, - "name": "deleted_by_user_id?: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 6, - "name": "title!", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "description?", - "type_info": "Text" - }, - { - "ordinal": 8, - "name": "status!: TaskStatus", - "type_info": { - "Custom": { - "name": "task_status", - "kind": { - "Enum": [ - "todo", - "in-progress", - "in-review", - "done", - "cancelled" - ] - } - } - } - }, - { - "ordinal": 9, - "name": "version!", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "deleted_at?", - "type_info": "Timestamptz" - }, - { - "ordinal": 11, - "name": "shared_at?", - "type_info": "Timestamptz" - }, - { - "ordinal": 12, - "name": "created_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 13, - "name": "updated_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 14, - "name": "user_id?: Uuid", - "type_info": "Uuid" - }, - { - "ordinal": 15, - "name": "user_first_name?", - "type_info": "Text" - }, - { - "ordinal": 16, - "name": "user_last_name?", - "type_info": "Text" - }, - { - "ordinal": 17, - "name": "user_username?", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Uuid" - ] - }, - "nullable": [ - false, - false, - false, - true, - true, - true, - false, - true, - false, - false, - true, - true, - false, - false, - false, - true, - true, - true - ] - }, - "hash": "ff9b35a31210dbddd237f4234bec1411b5aa1b0be986fbe5a8ee21e6771222f2" -} diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index 608b257b..7498a6c6 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -12,7 +12,7 @@ aes-gcm = "0.10" chrono = { version = "0.4", features = ["serde"] } futures = "0.3" async-trait = "0.1" -reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] } +reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls", "stream"] } secrecy = "0.10.3" sentry = { version = "0.41.0", features = ["anyhow", "backtrace", "panic", "debug-images"] } sentry-tracing = { version = "0.41.0", features = ["backtrace"] } @@ -26,6 +26,7 @@ tracing = { workspace = true } tracing-subscriber = { workspace = true } tracing-error = "0.2" thiserror = { workspace = true } +ts-rs = { workspace = true } utils = { path = "../utils" } uuid = { version = "1", features = ["serde", "v4"] } jsonwebtoken = "9" diff --git a/crates/remote/docker-compose.yml b/crates/remote/docker-compose.yml index 6917d9d6..b1738433 100644 --- a/crates/remote/docker-compose.yml +++ b/crates/remote/docker-compose.yml @@ -1,6 +1,7 @@ services: remote-db: image: postgres:16-alpine + command: ["postgres", "-c", "wal_level=logical"] environment: POSTGRES_DB: remote POSTGRES_USER: remote @@ -16,6 +17,23 @@ services: ports: - "5432:5432" + electric: + image: electricsql/electric:latest + working_dir: /app + environment: + DATABASE_URL: postgresql://electric_sync:${ELECTRIC_ROLE_PASSWORD:?set in .env.remote}@remote-db:5432/remote?sslmode=disable + PG_PROXY_PORT: 65432 + LOGICAL_PUBLISHER_HOST: electric + AUTH_MODE: insecure + ELECTRIC_INSECURE: true + 
ELECTRIC_MANUAL_TABLE_PUBLISHING: true + ELECTRIC_USAGE_REPORTING: false + volumes: + - electric-data:/app/persistent + depends_on: + remote-db: + condition: service_healthy + remote-server: build: context: ../.. @@ -23,10 +41,12 @@ services: depends_on: remote-db: condition: service_healthy + electric: + condition: service_started environment: SERVER_DATABASE_URL: postgres://remote:remote@remote-db:5432/remote SERVER_LISTEN_ADDR: 0.0.0.0:8081 - SERVER_ACTIVITY_CHANNEL: activity + ELECTRIC_URL: http://electric:3000 GITHUB_OAUTH_CLIENT_ID: ${GITHUB_OAUTH_CLIENT_ID:?set in .env.remote} GITHUB_OAUTH_CLIENT_SECRET: ${GITHUB_OAUTH_CLIENT_SECRET:?set in .env.remote} GOOGLE_OAUTH_CLIENT_ID: ${GOOGLE_OAUTH_CLIENT_ID:?set in .env.remote} @@ -36,9 +56,11 @@ services: SERVER_PUBLIC_BASE_URL: http://localhost:3000 VITE_APP_BASE_URL: http://localhost:3000 VITE_API_BASE_URL: http://localhost:3000 + ELECTRIC_ROLE_PASSWORD: ${ELECTRIC_ROLE_PASSWORD:?set in .env.remote} ports: - "127.0.0.1:3000:8081" restart: unless-stopped volumes: remote-db-data: + electric-data: diff --git a/crates/remote/migrations/20251127000000_electric_support.sql b/crates/remote/migrations/20251127000000_electric_support.sql new file mode 100644 index 00000000..e6647cc3 --- /dev/null +++ b/crates/remote/migrations/20251127000000_electric_support.sql @@ -0,0 +1,21 @@ +CREATE ROLE electric_sync WITH LOGIN REPLICATION; + +GRANT CONNECT ON DATABASE remote TO electric_sync; +GRANT USAGE ON SCHEMA public TO electric_sync; + +CREATE PUBLICATION electric_publication_default; + +CREATE OR REPLACE FUNCTION electric_sync_table(p_schema text, p_table text) +RETURNS void +LANGUAGE plpgsql +AS $$ +DECLARE + qualified text := format('%I.%I', p_schema, p_table); +BEGIN + EXECUTE format('ALTER TABLE %s REPLICA IDENTITY FULL', qualified); + EXECUTE format('GRANT SELECT ON TABLE %s TO electric_sync', qualified); + EXECUTE format('ALTER PUBLICATION %I ADD TABLE %s', 'electric_publication_default', qualified); +END; +$$; + +SELECT electric_sync_table('public', 'shared_tasks'); diff --git a/crates/remote/migrations/20251201000000_drop_unused_activity_and_columns.sql b/crates/remote/migrations/20251201000000_drop_unused_activity_and_columns.sql new file mode 100644 index 00000000..86d3aa1d --- /dev/null +++ b/crates/remote/migrations/20251201000000_drop_unused_activity_and_columns.sql @@ -0,0 +1,9 @@ +-- Drop activity feed tables and functions +DROP TABLE IF EXISTS activity CASCADE; +DROP TABLE IF EXISTS project_activity_counters; +DROP FUNCTION IF EXISTS ensure_activity_partition; +DROP FUNCTION IF EXISTS activity_notify; + +-- Drop unused columns from shared_tasks +ALTER TABLE shared_tasks DROP COLUMN IF EXISTS version; +ALTER TABLE shared_tasks DROP COLUMN IF EXISTS last_event_seq; diff --git a/crates/remote/migrations/20251201010000_unify_task_status_enums.sql b/crates/remote/migrations/20251201010000_unify_task_status_enums.sql new file mode 100644 index 00000000..90dd7467 --- /dev/null +++ b/crates/remote/migrations/20251201010000_unify_task_status_enums.sql @@ -0,0 +1,2 @@ +ALTER TYPE task_status RENAME VALUE 'in-progress' TO 'inprogress'; +ALTER TYPE task_status RENAME VALUE 'in-review' TO 'inreview'; diff --git a/crates/remote/src/activity/broker.rs b/crates/remote/src/activity/broker.rs deleted file mode 100644 index 42e2fc4b..00000000 --- a/crates/remote/src/activity/broker.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::{ - hash::{Hash, Hasher}, - pin::Pin, - sync::Arc, -}; - -use chrono::{DateTime, Utc}; -use futures::{Stream, StreamExt, 
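As a sketch of the intent of the two `RENAME VALUE` statements above: they bring the Postgres `task_status` labels in line with what serde's `lowercase` renaming produces on the Rust side (see the `TaskStatus` changes to `crates/remote/src/db/tasks.rs` later in this patch), so a single spelling flows through SQLx, Electric, and the HTTP API. A minimal round-trip sketch, assuming `serde` (with the `derive` feature) and `serde_json` as dependencies; the enum here only mirrors the one in the patch:

use serde::{Deserialize, Serialize};

// Mirrors the patched TaskStatus: `rename_all = "lowercase"` turns
// `InProgress` into "inprogress", matching the renamed enum label in
// 20251201010000_unify_task_status_enums.sql.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
enum TaskStatus {
    Todo,
    InProgress,
    InReview,
    Done,
    Cancelled,
}

fn main() {
    assert_eq!(
        serde_json::to_string(&TaskStatus::InProgress).unwrap(),
        "\"inprogress\""
    );
    let parsed: TaskStatus = serde_json::from_str("\"inreview\"").unwrap();
    assert_eq!(parsed, TaskStatus::InReview);
}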
future}; -use serde::{Deserialize, Serialize}; -use tokio::sync::broadcast; -use tokio_stream::wrappers::{BroadcastStream, errors::BroadcastStreamRecvError}; - -#[derive(Debug, Serialize, Deserialize)] -pub struct ActivityResponse { - pub data: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ActivityEvent { - pub seq: i64, - pub event_id: uuid::Uuid, - pub project_id: uuid::Uuid, - pub event_type: String, - pub created_at: DateTime, - pub payload: Option, -} - -impl ActivityEvent { - pub fn new( - seq: i64, - event_id: uuid::Uuid, - project_id: uuid::Uuid, - event_type: String, - created_at: DateTime, - payload: Option, - ) -> Self { - Self { - seq, - event_id, - project_id, - event_type, - created_at, - payload, - } - } -} - -#[derive(Clone)] -pub struct ActivityBroker { - shards: Arc>>, -} - -pub type ActivityStream = - Pin> + Send + 'static>>; - -impl ActivityBroker { - /// Shard broadcast senders to keep busy organisations from evicting everyone else's events. - pub fn new(shard_count: usize, shard_capacity: usize) -> Self { - let shard_count = shard_count.max(1); - let shard_capacity = shard_capacity.max(1); - let shards = (0..shard_count) - .map(|_| { - let (sender, _receiver) = broadcast::channel(shard_capacity); - sender - }) - .collect(); - - Self { - shards: Arc::new(shards), - } - } - - pub fn subscribe(&self, project_id: uuid::Uuid) -> ActivityStream { - let index = self.shard_index(&project_id); - let receiver = self.shards[index].subscribe(); - - let stream = BroadcastStream::new(receiver).filter_map(move |item| { - future::ready(match item { - Ok(event) if event.project_id == project_id => Some(Ok(event)), - Ok(_) => None, - Err(err) => Some(Err(err)), - }) - }); - - Box::pin(stream) - } - - pub fn publish(&self, event: ActivityEvent) { - let index = self.shard_index(&event.project_id); - if let Err(error) = self.shards[index].send(event) { - tracing::debug!(?error, "no subscribers for activity event"); - } - } - - fn shard_index(&self, project_id: &uuid::Uuid) -> usize { - let mut hasher = std::collections::hash_map::DefaultHasher::new(); - project_id.hash(&mut hasher); - (hasher.finish() as usize) % self.shards.len() - } -} - -impl Default for ActivityBroker { - fn default() -> Self { - Self::new(16, 512) - } -} diff --git a/crates/remote/src/activity/mod.rs b/crates/remote/src/activity/mod.rs deleted file mode 100644 index 6f1b4397..00000000 --- a/crates/remote/src/activity/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod broker; - -pub use broker::{ActivityBroker, ActivityEvent, ActivityResponse, ActivityStream}; diff --git a/crates/remote/src/app.rs b/crates/remote/src/app.rs index a03b0c75..4cdeedc5 100644 --- a/crates/remote/src/app.rs +++ b/crates/remote/src/app.rs @@ -1,11 +1,11 @@ use std::{net::SocketAddr, sync::Arc}; use anyhow::{Context, bail}; +use secrecy::ExposeSecret; use tracing::instrument; use crate::{ AppState, - activity::ActivityBroker, auth::{ GitHubOAuthProvider, GoogleOAuthProvider, JwtService, OAuthHandoffService, OAuthTokenValidator, ProviderRegistry, @@ -22,7 +22,7 @@ impl Server { #[instrument( name = "remote_server", skip(config), - fields(listen_addr = %config.listen_addr, activity_channel = %config.activity_channel) + fields(listen_addr = %config.listen_addr) )] pub async fn run(config: RemoteServerConfig) -> anyhow::Result<()> { let pool = db::create_pool(&config.database_url) @@ -33,12 +33,12 @@ impl Server { .await .context("failed to run database migrations")?; - 
db::maintenance::spawn_activity_partition_maintenance(pool.clone()); + if let Some(password) = config.electric_role_password.as_ref() { + db::ensure_electric_role_password(&pool, password.expose_secret()) + .await + .context("failed to set electric role password")?; + } - let broker = ActivityBroker::new( - config.activity_broadcast_shards, - config.activity_broadcast_capacity, - ); let auth_config = config.auth.clone(); let jwt = Arc::new(JwtService::new(auth_config.jwt_secret().clone())); @@ -84,21 +84,18 @@ impl Server { ) })?; + let http_client = reqwest::Client::new(); let state = AppState::new( pool.clone(), - broker.clone(), config.clone(), jwt, handoff_service, oauth_token_validator, mailer, server_public_base_url, + http_client, ); - let listener = - db::ActivityListener::new(pool.clone(), broker, config.activity_channel.clone()); - tokio::spawn(listener.run()); - let router = routes::router(state); let addr: SocketAddr = config .listen_addr diff --git a/crates/remote/src/config.rs b/crates/remote/src/config.rs index 43052074..528eec96 100644 --- a/crates/remote/src/config.rs +++ b/crates/remote/src/config.rs @@ -4,26 +4,15 @@ use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD}; use secrecy::SecretString; use thiserror::Error; -// Default activity items returned in a single query -const DEFAULT_ACTIVITY_DEFAULT_LIMIT: i64 = 200; -// Max activity items that can be requested in a single query -const DEFAULT_ACTIVITY_MAX_LIMIT: i64 = 500; -const DEFAULT_ACTIVITY_BROADCAST_SHARDS: usize = 16; -const DEFAULT_ACTIVITY_BROADCAST_CAPACITY: usize = 512; -const DEFAULT_ACTIVITY_CATCHUP_BATCH_SIZE: i64 = 100; - #[derive(Debug, Clone)] pub struct RemoteServerConfig { pub database_url: String, pub listen_addr: String, pub server_public_base_url: Option, - pub activity_channel: String, - pub activity_default_limit: i64, - pub activity_max_limit: i64, - pub activity_broadcast_shards: usize, - pub activity_broadcast_capacity: usize, - pub activity_catchup_batch_size: i64, pub auth: AuthConfig, + pub electric_url: String, + pub electric_secret: Option, + pub electric_role_password: Option, } #[derive(Debug, Error)] @@ -47,59 +36,31 @@ impl RemoteServerConfig { let server_public_base_url = env::var("SERVER_PUBLIC_BASE_URL").ok(); - let activity_channel = - env::var("SERVER_ACTIVITY_CHANNEL").unwrap_or_else(|_| "activity".to_string()); - - let activity_default_limit = DEFAULT_ACTIVITY_DEFAULT_LIMIT; - let activity_max_limit = DEFAULT_ACTIVITY_MAX_LIMIT; - - let activity_broadcast_shards = get_numeric_env_var( - "SERVER_ACTIVITY_BROADCAST_SHARDS", - DEFAULT_ACTIVITY_BROADCAST_SHARDS, - )? - .max(1); - - let activity_broadcast_capacity = get_numeric_env_var( - "SERVER_ACTIVITY_BROADCAST_CAPACITY", - DEFAULT_ACTIVITY_BROADCAST_CAPACITY, - )? - .max(1); - - let activity_catchup_batch_size = get_numeric_env_var( - "SERVER_ACTIVITY_CATCHUP_BATCH_SIZE", - DEFAULT_ACTIVITY_CATCHUP_BATCH_SIZE, - )? 
- .max(1); - let auth = AuthConfig::from_env()?; + let electric_url = + env::var("ELECTRIC_URL").map_err(|_| ConfigError::MissingVar("ELECTRIC_URL"))?; + + let electric_secret = env::var("ELECTRIC_SECRET") + .map(|s| SecretString::new(s.into())) + .ok(); + + let electric_role_password = env::var("ELECTRIC_ROLE_PASSWORD") + .ok() + .map(|s| SecretString::new(s.into())); + Ok(Self { database_url, listen_addr, server_public_base_url, - activity_channel, - activity_default_limit, - activity_max_limit, - activity_broadcast_shards, - activity_broadcast_capacity, - activity_catchup_batch_size, auth, + electric_url, + electric_secret, + electric_role_password, }) } } -fn get_numeric_env_var( - var_name: &'static str, - default: T, -) -> Result { - match env::var(var_name) { - Ok(value) => value - .parse::() - .map_err(|_| ConfigError::InvalidVar(var_name)), - Err(_) => Ok(default), - } -} - #[derive(Debug, Clone)] pub struct OAuthProviderConfig { client_id: String, diff --git a/crates/remote/src/db/activity.rs b/crates/remote/src/db/activity.rs deleted file mode 100644 index f4ed3462..00000000 --- a/crates/remote/src/db/activity.rs +++ /dev/null @@ -1,95 +0,0 @@ -use chrono::{DateTime, Utc}; -use sqlx::PgPool; -use uuid::Uuid; - -use crate::activity::ActivityEvent; - -pub struct ActivityRepository<'a> { - pool: &'a PgPool, -} - -impl<'a> ActivityRepository<'a> { - pub fn new(pool: &'a PgPool) -> Self { - Self { pool } - } - - pub async fn fetch_since( - &self, - project_id: Uuid, - after_seq: Option, - limit: i64, - ) -> Result, sqlx::Error> { - let rows = sqlx::query_as::<_, ActivityRow>( - r#" - SELECT seq, - event_id, - project_id, - event_type, - created_at, - payload - FROM activity - WHERE project_id = $1 - AND ($2::bigint IS NULL OR seq > $2) - ORDER BY seq ASC - LIMIT $3 - "#, - ) - .bind(project_id) - .bind(after_seq) - .bind(limit) - .fetch_all(self.pool) - .await?; - - Ok(rows.into_iter().map(ActivityRow::into_event).collect()) - } - - pub async fn fetch_by_seq( - &self, - project_id: Uuid, - seq: i64, - ) -> Result, sqlx::Error> { - let row = sqlx::query_as::<_, ActivityRow>( - r#" - SELECT seq, - event_id, - project_id, - event_type, - created_at, - payload - FROM activity - WHERE project_id = $1 - AND seq = $2 - LIMIT 1 - "#, - ) - .bind(project_id) - .bind(seq) - .fetch_optional(self.pool) - .await?; - - Ok(row.map(ActivityRow::into_event)) - } -} - -#[derive(sqlx::FromRow)] -struct ActivityRow { - seq: i64, - event_id: Uuid, - project_id: Uuid, - event_type: String, - created_at: DateTime, - payload: serde_json::Value, -} - -impl ActivityRow { - fn into_event(self) -> ActivityEvent { - ActivityEvent::new( - self.seq, - self.event_id, - self.project_id, - self.event_type, - self.created_at, - Some(self.payload), - ) - } -} diff --git a/crates/remote/src/db/listener.rs b/crates/remote/src/db/listener.rs deleted file mode 100644 index 561d0b48..00000000 --- a/crates/remote/src/db/listener.rs +++ /dev/null @@ -1,108 +0,0 @@ -use std::time::Duration; - -use anyhow::Context; -use serde::Deserialize; -use sqlx::{PgPool, postgres::PgListener}; -use tokio::time::sleep; -use tracing::instrument; -use uuid::Uuid; - -use crate::{activity::ActivityBroker, db::activity::ActivityRepository}; - -pub struct ActivityListener { - pool: PgPool, - broker: ActivityBroker, - channel: String, -} - -impl ActivityListener { - pub fn new(pool: PgPool, broker: ActivityBroker, channel: String) -> Self { - Self { - pool, - broker, - channel, - } - } - - #[instrument( - name = "activity.listener", - skip(self), 
- fields(channel = %self.channel) - )] - pub async fn run(self) { - let mut backoff = Duration::from_secs(1); - let max_backoff = Duration::from_secs(30); - - let pool = self.pool; - let broker = self.broker; - let channel = self.channel; - - loop { - match listen_loop(&pool, &broker, &channel).await { - Ok(_) => { - backoff = Duration::from_secs(1); - } - Err(error) => { - tracing::error!(?error, ?backoff, "activity listener error; retrying"); - sleep(backoff).await; - backoff = (backoff * 2).min(max_backoff); - } - } - } - } -} - -#[instrument( - name = "activity.listen_loop", - skip(pool, broker), - fields(channel = %channel) -)] -async fn listen_loop(pool: &PgPool, broker: &ActivityBroker, channel: &str) -> anyhow::Result<()> { - let mut listener = PgListener::connect_with(pool) - .await - .context("failed to create LISTEN connection")?; - listener - .listen(channel) - .await - .with_context(|| format!("failed to LISTEN on channel {channel}"))?; - - loop { - let notification = listener - .recv() - .await - .context("failed to receive LISTEN notification")?; - - let payload: NotificationEnvelope = serde_json::from_str(notification.payload()) - .with_context(|| format!("invalid notification payload: {}", notification.payload()))?; - - tracing::trace!(%payload.seq, project_id = %payload.project_id, "received activity notification"); - - let project_uuid = payload - .project_id - .parse::() - .with_context(|| format!("invalid project_id UUID: {}", payload.project_id))?; - - let event = match ActivityRepository::new(pool) - .fetch_by_seq(project_uuid, payload.seq) - .await - { - Ok(Some(event)) => event, - Ok(None) => { - tracing::warn!(seq = payload.seq, project_id = %payload.project_id, "activity row missing for notification"); - continue; - } - Err(error) => { - tracing::error!(?error, seq = payload.seq, project_id = %payload.project_id, "failed to fetch activity payload"); - continue; - } - }; - - broker.publish(event); - } -} - -#[derive(Debug, Deserialize)] -struct NotificationEnvelope { - seq: i64, - project_id: String, -} diff --git a/crates/remote/src/db/maintenance.rs b/crates/remote/src/db/maintenance.rs deleted file mode 100644 index 717afe86..00000000 --- a/crates/remote/src/db/maintenance.rs +++ /dev/null @@ -1,159 +0,0 @@ -use std::{sync::OnceLock, time::Duration}; - -use chrono::{Duration as ChronoDuration, NaiveTime, TimeZone, Utc}; -use sqlx::{PgPool, error::DatabaseError}; -use tokio::time::sleep; -use tracing::{error, info, warn}; - -const PRUNE_LOCK_KEY: &str = "vibe_kanban_activity_retention_v1"; -static PROVISION_TIME: OnceLock = OnceLock::new(); -static PRUNE_TIME: OnceLock = OnceLock::new(); - -fn provision_time() -> NaiveTime { - *PROVISION_TIME.get_or_init(|| NaiveTime::from_hms_opt(0, 10, 0).expect("valid time")) -} - -fn prune_time() -> NaiveTime { - *PRUNE_TIME.get_or_init(|| NaiveTime::from_hms_opt(1, 30, 0).expect("valid time")) -} - -pub fn spawn_activity_partition_maintenance(pool: PgPool) { - let creation_pool = pool.clone(); - tokio::spawn(async move { - if let Err(err) = ensure_future_partitions_with_pool(&creation_pool).await { - error!(error = ?err, "initial activity partition provisioning failed"); - } - - loop { - sleep(duration_until(provision_time())).await; - if let Err(err) = ensure_future_partitions_with_pool(&creation_pool).await { - error!(error = ?err, "scheduled partition provisioning failed"); - } - } - }); - - tokio::spawn(async move { - if let Err(err) = prune_old_partitions(&pool).await { - error!(error = ?err, "initial activity 
partition pruning failed"); - } - - loop { - sleep(duration_until(prune_time())).await; - if let Err(err) = prune_old_partitions(&pool).await { - error!(error = ?err, "scheduled partition pruning failed"); - } - } - }); -} - -fn duration_until(target_time: NaiveTime) -> Duration { - let now = Utc::now(); - - let today = now.date_naive(); - let mut next = today.and_time(target_time); - - if now.time() >= target_time { - next = (today + ChronoDuration::days(1)).and_time(target_time); - } - - let next_dt = Utc.from_utc_datetime(&next); - (next_dt - now) - .to_std() - .unwrap_or_else(|_| Duration::from_secs(0)) -} - -async fn prune_old_partitions(pool: &PgPool) -> Result<(), sqlx::Error> { - let mut conn = pool.acquire().await?; - - let lock_acquired = sqlx::query_scalar!( - r#" - SELECT pg_try_advisory_lock(hashtextextended($1, 0)) - "#, - PRUNE_LOCK_KEY - ) - .fetch_one(&mut *conn) - .await? - .unwrap_or(false); - - if !lock_acquired { - warn!("skipping partition pruning because another worker holds the lock"); - return Ok(()); - } - - let result = async { - let partitions = sqlx::query!( - r#" - SELECT format('%I.%I', n.nspname, c.relname) AS qualified_name, - split_part( - split_part(pg_get_expr(c.relpartbound, c.oid), ' TO (''', 2), - ''')', 1 - )::timestamptz AS upper_bound - FROM pg_partition_tree('activity') pt - JOIN pg_class c ON c.oid = pt.relid - JOIN pg_namespace n ON n.oid = c.relnamespace - WHERE pt.isleaf - AND c.relname ~ '^activity_p_\d{8}$' - AND split_part( - split_part(pg_get_expr(c.relpartbound, c.oid), ' TO (''', 2), - ''')', 1 - )::timestamptz <= NOW() - INTERVAL '2 days' - ORDER BY upper_bound - "# - ) - .fetch_all(&mut *conn) - .await?; - - for partition in partitions { - if let Some(name) = partition.qualified_name { - let detach = format!("ALTER TABLE activity DETACH PARTITION {name} CONCURRENTLY"); - sqlx::query(&detach).execute(&mut *conn).await?; - - let drop = format!("DROP TABLE {name}"); - sqlx::query(&drop).execute(&mut *conn).await?; - - info!(partition = %name, "dropped activity partition"); - } - } - - Ok(()) - } - .await; - - let _ = sqlx::query_scalar!( - r#" - SELECT pg_advisory_unlock(hashtextextended($1, 0)) - "#, - PRUNE_LOCK_KEY - ) - .fetch_one(&mut *conn) - .await; - - result -} - -pub async fn ensure_future_partitions_with_pool(pool: &PgPool) -> Result<(), sqlx::Error> { - let mut conn = pool.acquire().await?; - ensure_future_partitions(&mut conn).await -} - -pub async fn ensure_future_partitions( - executor: &mut sqlx::PgConnection, -) -> Result<(), sqlx::Error> { - sqlx::query("SELECT ensure_activity_partition(NOW())") - .execute(&mut *executor) - .await?; - sqlx::query("SELECT ensure_activity_partition(NOW() + INTERVAL '24 hours')") - .execute(&mut *executor) - .await?; - sqlx::query("SELECT ensure_activity_partition(NOW() + INTERVAL '48 hours')") - .execute(&mut *executor) - .await?; - Ok(()) -} - -pub fn is_partition_missing_error(err: &(dyn DatabaseError + Send + Sync + 'static)) -> bool { - err.code() - .as_deref() - .is_some_and(|code| code.starts_with("23")) - && err.message().contains("no partition of relation") -} diff --git a/crates/remote/src/db/mod.rs b/crates/remote/src/db/mod.rs index b2bfe032..64afd66d 100644 --- a/crates/remote/src/db/mod.rs +++ b/crates/remote/src/db/mod.rs @@ -1,9 +1,6 @@ -pub mod activity; pub mod auth; pub mod identity_errors; pub mod invitations; -pub mod listener; -pub mod maintenance; pub mod oauth; pub mod oauth_accounts; pub mod organization_members; @@ -12,7 +9,6 @@ pub mod projects; pub mod tasks; pub 
mod users; -pub use listener::ActivityListener; use sqlx::{PgPool, Postgres, Transaction, migrate::MigrateError, postgres::PgPoolOptions}; pub(crate) type Tx<'a> = Transaction<'a, Postgres>; @@ -27,3 +23,21 @@ pub(crate) async fn create_pool(database_url: &str) -> Result Result<(), sqlx::Error> { + if password.is_empty() { + return Ok(()); + } + + // PostgreSQL doesn't support parameter binding for ALTER ROLE PASSWORD + // We need to escape the password properly and embed it directly in the SQL + let escaped_password = password.replace("'", "''"); + let sql = format!("ALTER ROLE electric_sync WITH PASSWORD '{escaped_password}'"); + + sqlx::query(&sql).execute(pool).await?; + + Ok(()) +} diff --git a/crates/remote/src/db/tasks.rs b/crates/remote/src/db/tasks.rs index 2f27ccfb..6bdb0f79 100644 --- a/crates/remote/src/db/tasks.rs +++ b/crates/remote/src/db/tasks.rs @@ -2,27 +2,21 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use sqlx::PgPool; use thiserror::Error; +use ts_rs::TS; use uuid::Uuid; use super::{ - Tx, identity_errors::IdentityError, projects::{ProjectError, ProjectRepository}, users::{UserData, fetch_user}, }; -use crate::db::maintenance; - -pub struct BulkFetchResult { - pub tasks: Vec, - pub deleted_task_ids: Vec, - pub latest_seq: Option, -} pub const MAX_SHARED_TASK_TEXT_BYTES: usize = 50 * 1024; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] -#[serde(rename_all = "kebab-case")] -#[sqlx(type_name = "task_status", rename_all = "kebab-case")] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, sqlx::Type, TS)] +#[serde(rename_all = "lowercase")] +#[sqlx(type_name = "task_status", rename_all = "lowercase")] +#[ts(export)] pub enum TaskStatus { Todo, InProgress, @@ -43,7 +37,8 @@ impl SharedTaskWithUser { } } -#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow)] +#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow, TS)] +#[ts(export)] pub struct SharedTask { pub id: Uuid, pub organization_id: Uuid, @@ -54,19 +49,12 @@ pub struct SharedTask { pub title: String, pub description: Option, pub status: TaskStatus, - pub version: i64, pub deleted_at: Option>, pub shared_at: Option>, pub created_at: DateTime, pub updated_at: DateTime, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SharedTaskActivityPayload { - pub task: SharedTask, - pub user: Option, -} - #[derive(Debug, Clone, Deserialize)] pub struct CreateSharedTaskData { pub project_id: Uuid, @@ -81,7 +69,6 @@ pub struct UpdateSharedTaskData { pub title: Option, pub description: Option, pub status: Option, - pub version: Option, pub acting_user_id: Uuid, } @@ -89,13 +76,11 @@ pub struct UpdateSharedTaskData { pub struct AssignTaskData { pub new_assignee_user_id: Option, pub previous_assignee_user_id: Option, - pub version: Option, } #[derive(Debug, Clone, Deserialize)] pub struct DeleteTaskData { pub acting_user_id: Uuid, - pub version: Option, } #[derive(Debug, Error)] @@ -141,7 +126,6 @@ impl<'a> SharedTaskRepository<'a> { title AS "title!", description AS "description?", status AS "status!: TaskStatus", - version AS "version!", deleted_at AS "deleted_at?", shared_at AS "shared_at?", created_at AS "created_at!", @@ -205,7 +189,6 @@ impl<'a> SharedTaskRepository<'a> { title AS "title!", description AS "description?", status AS "status!: TaskStatus", - version AS "version!", deleted_at AS "deleted_at?", shared_at AS "shared_at?", created_at AS "created_at!", @@ -226,114 +209,10 @@ impl<'a> SharedTaskRepository<'a> { None => 
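A note on `ensure_electric_role_password` above: `ALTER ROLE ... PASSWORD` cannot take a bind parameter, so the password is embedded as a SQL string literal with every single quote doubled, exactly as the helper does. A minimal standalone sketch of that quoting rule; `quote_literal` is a hypothetical name for illustration, not part of this patch:

// Hypothetical helper mirroring the quoting in ensure_electric_role_password:
// double each embedded single quote, then wrap the result in single quotes.
fn quote_literal(raw: &str) -> String {
    format!("'{}'", raw.replace('\'', "''"))
}

fn main() {
    assert_eq!(quote_literal("s3cret"), "'s3cret'");
    // An embedded quote must not terminate the literal early.
    assert_eq!(quote_literal("it's"), "'it''s'");
    let sql = format!(
        "ALTER ROLE electric_sync WITH PASSWORD {}",
        quote_literal("it's")
    );
    assert_eq!(sql, "ALTER ROLE electric_sync WITH PASSWORD 'it''s'");
}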
None, }; - insert_activity(&mut tx, &task, user.as_ref(), "task.created").await?; tx.commit().await.map_err(SharedTaskError::from)?; Ok(SharedTaskWithUser::new(task, user)) } - pub async fn bulk_fetch(&self, project_id: Uuid) -> Result { - let mut tx = self.pool.begin().await?; - sqlx::query("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ") - .execute(&mut *tx) - .await?; - - let rows = sqlx::query!( - r#" - SELECT - st.id AS "id!: Uuid", - st.organization_id AS "organization_id!: Uuid", - st.project_id AS "project_id!: Uuid", - st.creator_user_id AS "creator_user_id?: Uuid", - st.assignee_user_id AS "assignee_user_id?: Uuid", - st.deleted_by_user_id AS "deleted_by_user_id?: Uuid", - st.title AS "title!", - st.description AS "description?", - st.status AS "status!: TaskStatus", - st.version AS "version!", - st.deleted_at AS "deleted_at?", - st.shared_at AS "shared_at?", - st.created_at AS "created_at!", - st.updated_at AS "updated_at!", - u.id AS "user_id?: Uuid", - u.first_name AS "user_first_name?", - u.last_name AS "user_last_name?", - u.username AS "user_username?" - FROM shared_tasks st - LEFT JOIN users u ON st.assignee_user_id = u.id - WHERE st.project_id = $1 - AND st.deleted_at IS NULL - ORDER BY st.updated_at DESC - "#, - project_id - ) - .fetch_all(&mut *tx) - .await?; - - let tasks = rows - .into_iter() - .map(|row| { - let task = SharedTask { - id: row.id, - organization_id: row.organization_id, - project_id: row.project_id, - creator_user_id: row.creator_user_id, - assignee_user_id: row.assignee_user_id, - deleted_by_user_id: row.deleted_by_user_id, - title: row.title, - description: row.description, - status: row.status, - version: row.version, - deleted_at: row.deleted_at, - shared_at: row.shared_at, - created_at: row.created_at, - updated_at: row.updated_at, - }; - - let user = row.user_id.map(|id| UserData { - id, - first_name: row.user_first_name, - last_name: row.user_last_name, - username: row.user_username, - }); - - SharedTaskActivityPayload { task, user } - }) - .collect(); - - let deleted_rows = sqlx::query!( - r#" - SELECT st.id AS "id!: Uuid" - FROM shared_tasks st - WHERE st.project_id = $1 - AND st.deleted_at IS NOT NULL - "#, - project_id - ) - .fetch_all(&mut *tx) - .await?; - - let deleted_task_ids = deleted_rows.into_iter().map(|row| row.id).collect(); - - let latest_seq = sqlx::query_scalar!( - r#" - SELECT MAX(seq) - FROM activity - WHERE project_id = $1 - "#, - project_id - ) - .fetch_one(&mut *tx) - .await?; - - tx.commit().await?; - - Ok(BulkFetchResult { - tasks, - deleted_task_ids, - latest_seq, - }) - } - pub async fn update( &self, task_id: Uuid, @@ -348,11 +227,9 @@ impl<'a> SharedTaskRepository<'a> { SET title = COALESCE($2, t.title), description = COALESCE($3, t.description), status = COALESCE($4, t.status), - version = t.version + 1, updated_at = NOW() WHERE t.id = $1 - AND t.version = COALESCE($5, t.version) - AND t.assignee_user_id = $6 + AND t.assignee_user_id = $5 AND t.deleted_at IS NULL RETURNING t.id AS "id!", @@ -364,7 +241,6 @@ impl<'a> SharedTaskRepository<'a> { t.title AS "title!", t.description AS "description?", t.status AS "status!: TaskStatus", - t.version AS "version!", t.deleted_at AS "deleted_at?", t.shared_at AS "shared_at?", t.created_at AS "created_at!", @@ -374,12 +250,11 @@ impl<'a> SharedTaskRepository<'a> { data.title, data.description, data.status as Option, - data.version, data.acting_user_id ) .fetch_optional(&mut *tx) .await? 
- .ok_or_else(|| SharedTaskError::Conflict("task version mismatch".to_string()))?; + .ok_or_else(|| SharedTaskError::NotFound)?; ensure_text_size(&task.title, task.description.as_deref())?; @@ -388,7 +263,6 @@ impl<'a> SharedTaskRepository<'a> { None => None, }; - insert_activity(&mut tx, &task, user.as_ref(), "task.updated").await?; tx.commit().await.map_err(SharedTaskError::from)?; Ok(SharedTaskWithUser::new(task, user)) } @@ -404,10 +278,8 @@ impl<'a> SharedTaskRepository<'a> { SharedTask, r#" UPDATE shared_tasks AS t - SET assignee_user_id = $2, - version = t.version + 1 + SET assignee_user_id = $2 WHERE t.id = $1 - AND t.version = COALESCE($4, t.version) AND ($3::uuid IS NULL OR t.assignee_user_id = $3::uuid) AND t.deleted_at IS NULL RETURNING @@ -420,7 +292,6 @@ impl<'a> SharedTaskRepository<'a> { t.title AS "title!", t.description AS "description?", t.status AS "status!: TaskStatus", - t.version AS "version!", t.deleted_at AS "deleted_at?", t.shared_at AS "shared_at?", t.created_at AS "created_at!", @@ -428,21 +299,17 @@ impl<'a> SharedTaskRepository<'a> { "#, task_id, data.new_assignee_user_id, - data.previous_assignee_user_id, - data.version + data.previous_assignee_user_id ) .fetch_optional(&mut *tx) .await? - .ok_or_else(|| { - SharedTaskError::Conflict("task version or previous assignee mismatch".to_string()) - })?; + .ok_or_else(|| SharedTaskError::Conflict("previous assignee mismatch".to_string()))?; let user = match data.new_assignee_user_id { Some(user_id) => fetch_user(&mut tx, user_id).await?, None => None, }; - insert_activity(&mut tx, &task, user.as_ref(), "task.reassigned").await?; tx.commit().await.map_err(SharedTaskError::from)?; Ok(SharedTaskWithUser::new(task, user)) } @@ -459,11 +326,9 @@ impl<'a> SharedTaskRepository<'a> { r#" UPDATE shared_tasks AS t SET deleted_at = NOW(), - deleted_by_user_id = $3, - version = t.version + 1 + deleted_by_user_id = $2 WHERE t.id = $1 - AND t.version = COALESCE($2, t.version) - AND t.assignee_user_id = $3 + AND t.assignee_user_id = $2 AND t.deleted_at IS NULL RETURNING t.id AS "id!", @@ -475,26 +340,44 @@ impl<'a> SharedTaskRepository<'a> { t.title AS "title!", t.description AS "description?", t.status AS "status!: TaskStatus", - t.version AS "version!", t.deleted_at AS "deleted_at?", t.shared_at AS "shared_at?", t.created_at AS "created_at!", t.updated_at AS "updated_at!" "#, task_id, - data.version, data.acting_user_id ) .fetch_optional(&mut *tx) .await? 
- .ok_or_else(|| { - SharedTaskError::Conflict("task version mismatch or user not authorized".to_string()) - })?; + .ok_or_else(|| SharedTaskError::Conflict("user not authorized".to_string()))?; - insert_activity(&mut tx, &task, None, "task.deleted").await?; tx.commit().await.map_err(SharedTaskError::from)?; Ok(SharedTaskWithUser::new(task, None)) } + + pub async fn check_existence( + &self, + task_ids: &[Uuid], + user_id: Uuid, + ) -> Result, SharedTaskError> { + let tasks = sqlx::query!( + r#" + SELECT t.id + FROM shared_tasks t + INNER JOIN organization_member_metadata om ON t.organization_id = om.organization_id + WHERE t.id = ANY($1) + AND t.deleted_at IS NULL + AND om.user_id = $2 + "#, + task_ids, + user_id + ) + .fetch_all(self.pool) + .await?; + + Ok(tasks.into_iter().map(|r| r.id).collect()) + } } pub(crate) fn ensure_text_size( @@ -510,81 +393,6 @@ pub(crate) fn ensure_text_size( Ok(()) } -async fn insert_activity( - tx: &mut Tx<'_>, - task: &SharedTask, - user: Option<&UserData>, - event_type: &str, -) -> Result<(), SharedTaskError> { - let payload = SharedTaskActivityPayload { - task: task.clone(), - user: user.cloned(), - }; - let payload = serde_json::to_value(payload).map_err(SharedTaskError::Serialization)?; - - // First attempt at inserting - if partitions are missing we retry after provisioning. - match do_insert_activity(tx, task, event_type, payload.clone()).await { - Ok(_) => Ok(()), - Err(err) => { - if let sqlx::Error::Database(db_err) = &err - && maintenance::is_partition_missing_error(db_err.as_ref()) - { - let code_owned = db_err.code().map(|c| c.to_string()); - let code = code_owned.as_deref().unwrap_or_default(); - tracing::warn!( - "Activity partition missing ({}), creating current and next partitions", - code - ); - - maintenance::ensure_future_partitions(tx.as_mut()) - .await - .map_err(SharedTaskError::from)?; - - return do_insert_activity(tx, task, event_type, payload) - .await - .map_err(SharedTaskError::from); - } - - Err(SharedTaskError::from(err)) - } - } -} - -async fn do_insert_activity( - tx: &mut Tx<'_>, - task: &SharedTask, - event_type: &str, - payload: serde_json::Value, -) -> Result<(), sqlx::Error> { - sqlx::query!( - r#" - WITH next AS ( - INSERT INTO project_activity_counters AS counters (project_id, last_seq) - VALUES ($1, 1) - ON CONFLICT (project_id) - DO UPDATE SET last_seq = counters.last_seq + 1 - RETURNING last_seq - ) - INSERT INTO activity ( - project_id, - seq, - assignee_user_id, - event_type, - payload - ) - SELECT $1, next.last_seq, $2, $3, $4 - FROM next - "#, - task.project_id, - task.assignee_user_id, - event_type, - payload - ) - .execute(&mut **tx) - .await - .map(|_| ()) -} - impl SharedTaskRepository<'_> { pub async fn organization_id( pool: &PgPool, diff --git a/crates/remote/src/db/users.rs b/crates/remote/src/db/users.rs index bb344704..a0015e04 100644 --- a/crates/remote/src/db/users.rs +++ b/crates/remote/src/db/users.rs @@ -1,6 +1,7 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use sqlx::{PgPool, query_as}; +use ts_rs::TS; use uuid::Uuid; use super::{Tx, identity_errors::IdentityError}; @@ -16,9 +17,10 @@ pub struct User { pub updated_at: DateTime, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow, TS)] +#[ts(export)] pub struct UserData { - pub id: Uuid, + pub user_id: Uuid, pub first_name: Option, pub last_name: Option, pub username: Option, @@ -91,6 +93,34 @@ impl<'a> UserRepository<'a> { .await .map_err(IdentityError::from) } + 
+ /// Fetch all assignees for a given project id. + /// Returns Vec containing all unique users assigned to tasks in the project. + pub async fn fetch_assignees_by_project( + &self, + project_id: Uuid, + ) -> Result, IdentityError> { + let rows = sqlx::query_as!( + UserData, + r#" + SELECT DISTINCT + u.id as "user_id", + u.first_name as "first_name", + u.last_name as "last_name", + u.username as "username" + FROM shared_tasks st + INNER JOIN users u ON u.id = st.assignee_user_id + WHERE st.project_id = $1 + AND st.assignee_user_id IS NOT NULL + "#, + project_id + ) + .fetch_all(self.pool) + .await + .map_err(IdentityError::from)?; + + Ok(rows) + } } async fn upsert_user(pool: &PgPool, user: &UpsertUser<'_>) -> Result { @@ -141,7 +171,7 @@ pub async fn fetch_user(tx: &mut Tx<'_>, user_id: Uuid) -> Result = OnceLock::new(); diff --git a/crates/remote/src/routes/activity.rs b/crates/remote/src/routes/activity.rs deleted file mode 100644 index 0d662623..00000000 --- a/crates/remote/src/routes/activity.rs +++ /dev/null @@ -1,67 +0,0 @@ -use axum::{ - Json, Router, - extract::{Extension, Query, State}, - http::StatusCode, - response::{IntoResponse, Response}, - routing::get, -}; -use serde::Deserialize; -use tracing::instrument; -use uuid::Uuid; - -use super::{error::ErrorResponse, organization_members::ensure_project_access}; -use crate::{ - AppState, activity::ActivityResponse, auth::RequestContext, db::activity::ActivityRepository, -}; - -pub fn router() -> Router { - Router::new().route("/activity", get(get_activity_stream)) -} - -#[derive(Debug, Deserialize)] -pub struct ActivityQuery { - /// Remote project to stream activity for - pub project_id: Uuid, - /// Fetch events after this ID (exclusive) - pub after: Option, - /// Maximum number of events to return - pub limit: Option, -} - -#[instrument( - name = "activity.get_activity_stream", - skip(state, ctx, params), - fields(user_id = %ctx.user.id, project_id = %params.project_id) -)] -async fn get_activity_stream( - State(state): State, - Extension(ctx): Extension, - Query(params): Query, -) -> Response { - let config = state.config(); - let limit = params - .limit - .unwrap_or(config.activity_default_limit) - .clamp(1, config.activity_max_limit); - let after = params.after; - let project_id = params.project_id; - - let _organization_id = match ensure_project_access(state.pool(), ctx.user.id, project_id).await - { - Ok(org_id) => org_id, - Err(error) => return error.into_response(), - }; - - let repo = ActivityRepository::new(state.pool()); - match repo.fetch_since(project_id, after, limit).await { - Ok(events) => (StatusCode::OK, Json(ActivityResponse { data: events })).into_response(), - Err(error) => { - tracing::error!(?error, "failed to load activity stream"); - ErrorResponse::new( - StatusCode::INTERNAL_SERVER_ERROR, - "failed to load activity stream", - ) - .into_response() - } - } -} diff --git a/crates/remote/src/routes/electric_proxy.rs b/crates/remote/src/routes/electric_proxy.rs new file mode 100644 index 00000000..4be87744 --- /dev/null +++ b/crates/remote/src/routes/electric_proxy.rs @@ -0,0 +1,182 @@ +use std::collections::HashMap; + +use axum::{ + Router, + body::Body, + extract::{Query, State}, + http::{HeaderMap, HeaderValue, StatusCode, header}, + response::{IntoResponse, Response}, + routing::get, +}; +use futures::TryStreamExt; +use secrecy::ExposeSecret; +use tracing::error; +use uuid::Uuid; + +use crate::{ + AppState, auth::RequestContext, db::organizations::OrganizationRepository, validated_where, + 
validated_where::ValidatedWhere, +}; + +pub fn router() -> Router { + Router::new().route("/shape/shared_tasks", get(proxy_shared_tasks)) +} + +/// Electric protocol query parameters that are safe to forward. +/// Based on https://electric-sql.com/docs/guides/auth#proxy-auth +/// Note: "where" is NOT included because it's controlled server-side for security. +const ELECTRIC_PARAMS: &[&str] = &["offset", "handle", "live", "cursor", "columns"]; + +/// Returns an empty shape response for users with no organization memberships. +fn empty_shape_response() -> Response { + let mut headers = HeaderMap::new(); + headers.insert( + header::CONTENT_TYPE, + HeaderValue::from_static("application/json"), + ); + (StatusCode::OK, headers, "[]").into_response() +} + +/// Proxy Shape requests for the `shared_tasks` table. +/// +/// Route: GET /v1/shape/shared_tasks?offset=-1 +/// +/// The `require_session` middleware has already validated the Bearer token +/// before this handler is called. +pub async fn proxy_shared_tasks( + State(state): State, + axum::extract::Extension(ctx): axum::extract::Extension, + Query(params): Query>, +) -> Result { + // Get user's organization memberships + let org_repo = OrganizationRepository::new(state.pool()); + let orgs = org_repo + .list_user_organizations(ctx.user.id) + .await + .map_err(|e| ProxyError::Authorization(format!("failed to fetch organizations: {e}")))?; + + if orgs.is_empty() { + // User has no org memberships - return empty result + return Ok(empty_shape_response()); + } + + // Build org_id filter using compile-time validated WHERE clause + let org_uuids: Vec = orgs.iter().map(|o| o.id).collect(); + let query = validated_where!("shared_tasks", r#""organization_id" = ANY($1)"#, &org_uuids); + let query_params = &[format!( + "{{{}}}", + org_uuids + .iter() + .map(|u| u.to_string()) + .collect::>() + .join(",") + )]; + tracing::debug!("Proxying Electric Shape request for shared_tasks table{query:?}"); + proxy_table(&state, &query, ¶ms, query_params).await +} + +/// Proxy a Shape request to Electric for a specific table. +/// +/// The table and where clause are set server-side (not from client params) +/// to prevent unauthorized access to other tables or data. +async fn proxy_table( + state: &AppState, + query: &ValidatedWhere, + client_params: &HashMap, + electric_params: &[String], +) -> Result { + // Build the Electric URL + let mut origin_url = url::Url::parse(&state.config.electric_url) + .map_err(|e| ProxyError::InvalidConfig(format!("invalid electric_url: {e}")))?; + + origin_url.set_path("/v1/shape"); + + // Set table server-side (security: client can't override) + origin_url + .query_pairs_mut() + .append_pair("table", query.table); + + // Set WHERE clause with parameterized values + origin_url + .query_pairs_mut() + .append_pair("where", query.where_clause); + + // Pass params for $1, $2, etc. 
placeholders + for (i, param) in electric_params.iter().enumerate() { + origin_url + .query_pairs_mut() + .append_pair(&format!("params[{}]", i + 1), param); + } + + // Forward safe client params + for (key, value) in client_params { + if ELECTRIC_PARAMS.contains(&key.as_str()) { + origin_url.query_pairs_mut().append_pair(key, value); + } + } + + if let Some(secret) = &state.config.electric_secret { + origin_url + .query_pairs_mut() + .append_pair("secret", secret.expose_secret()); + } + + let response = state + .http_client + .get(origin_url.as_str()) + .send() + .await + .map_err(ProxyError::Connection)?; + + let status = response.status(); + + let mut headers = HeaderMap::new(); + + // Copy headers from Electric response, but remove problematic ones + for (key, value) in response.headers() { + // Skip headers that interfere with browser handling + if key == header::CONTENT_ENCODING || key == header::CONTENT_LENGTH { + continue; + } + headers.insert(key.clone(), value.clone()); + } + + // Add Vary header for proper caching with auth + headers.insert(header::VARY, HeaderValue::from_static("Authorization")); + + // Stream the response body directly without buffering + let body_stream = response.bytes_stream().map_err(std::io::Error::other); + let body = Body::from_stream(body_stream); + + Ok((status, headers, body).into_response()) +} + +#[derive(Debug)] +pub enum ProxyError { + Connection(reqwest::Error), + InvalidConfig(String), + Authorization(String), +} + +impl IntoResponse for ProxyError { + fn into_response(self) -> Response { + match self { + ProxyError::Connection(err) => { + error!(?err, "failed to connect to Electric service"); + ( + StatusCode::BAD_GATEWAY, + "failed to connect to Electric service", + ) + .into_response() + } + ProxyError::InvalidConfig(msg) => { + error!(%msg, "invalid Electric proxy configuration"); + (StatusCode::INTERNAL_SERVER_ERROR, "internal server error").into_response() + } + ProxyError::Authorization(msg) => { + error!(%msg, "authorization failed for Electric proxy"); + (StatusCode::FORBIDDEN, "forbidden").into_response() + } + } + } +} diff --git a/crates/remote/src/routes/mod.rs b/crates/remote/src/routes/mod.rs index 7c8dc9fb..8ba11007 100644 --- a/crates/remote/src/routes/mod.rs +++ b/crates/remote/src/routes/mod.rs @@ -5,7 +5,7 @@ use axum::{ routing::get, }; use tower_http::{ - cors::CorsLayer, + cors::{AllowHeaders, AllowMethods, AllowOrigin, CorsLayer}, request_id::{MakeRequestUuid, PropagateRequestIdLayer, RequestId, SetRequestIdLayer}, services::{ServeDir, ServeFile}, trace::{DefaultOnFailure, DefaultOnResponse, TraceLayer}, @@ -14,7 +14,7 @@ use tracing::{Level, field}; use crate::{AppState, auth::require_session}; -pub mod activity; +mod electric_proxy; mod error; mod identity; mod oauth; @@ -53,13 +53,12 @@ pub fn router(state: AppState) -> Router { let v1_protected = Router::::new() .merge(identity::router()) - .merge(activity::router()) .merge(projects::router()) .merge(tasks::router()) .merge(organizations::router()) .merge(organization_members::protected_router()) .merge(oauth::protected_router()) - .merge(crate::ws::router()) + .merge(electric_proxy::router()) .layer(middleware::from_fn_with_state( state.clone(), require_session, @@ -73,7 +72,13 @@ pub fn router(state: AppState) -> Router { .nest("/v1", v1_public) .nest("/v1", v1_protected) .fallback_service(spa) - .layer(CorsLayer::permissive()) + .layer( + CorsLayer::new() + .allow_origin(AllowOrigin::mirror_request()) + .allow_methods(AllowMethods::mirror_request()) + 
.allow_headers(AllowHeaders::mirror_request()) + .allow_credentials(true), + ) .layer(trace_layer) .layer(PropagateRequestIdLayer::new(HeaderName::from_static( "x-request-id", diff --git a/crates/remote/src/routes/tasks.rs b/crates/remote/src/routes/tasks.rs index 4c23e09d..7c378be6 100644 --- a/crates/remote/src/routes/tasks.rs +++ b/crates/remote/src/routes/tasks.rs @@ -8,6 +8,7 @@ use axum::{ use serde::{Deserialize, Serialize}; use serde_json::json; use tracing::{Span, instrument}; +use ts_rs::TS; use uuid::Uuid; use super::{ @@ -30,60 +31,54 @@ use crate::{ pub fn router() -> Router { Router::new() - .route("/tasks/bulk", get(bulk_shared_tasks)) .route("/tasks", post(create_shared_task)) + .route("/tasks/check", post(check_tasks_existence)) .route("/tasks/{task_id}", patch(update_shared_task)) .route("/tasks/{task_id}", delete(delete_shared_task)) .route("/tasks/{task_id}/assign", post(assign_task)) + .route("/tasks/assignees", get(get_task_assignees_by_project)) } -#[derive(Debug, Deserialize)] -pub struct BulkTasksQuery { +#[derive(Debug, Deserialize, TS)] +#[ts(export)] +pub struct AssigneesQuery { pub project_id: Uuid, } #[instrument( - name = "tasks.bulk_shared_tasks", + name = "tasks.get_task_assignees_by_project", skip(state, ctx, query), fields(user_id = %ctx.user.id, project_id = %query.project_id, org_id = tracing::field::Empty) )] -pub async fn bulk_shared_tasks( +pub async fn get_task_assignees_by_project( State(state): State, Extension(ctx): Extension, - Query(query): Query, + Query(query): Query, ) -> Response { let pool = state.pool(); - let _organization_id = match ensure_project_access(pool, ctx.user.id, query.project_id).await { - Ok(org_id) => { - Span::current().record("org_id", format_args!("{org_id}")); - org_id + + let _org_id = match ensure_project_access(pool, ctx.user.id, query.project_id).await { + Ok(org) => { + Span::current().record("org_id", format_args!("{org}")); + org } Err(error) => return error.into_response(), }; - let repo = SharedTaskRepository::new(pool); - match repo.bulk_fetch(query.project_id).await { - Ok(snapshot) => ( - StatusCode::OK, - Json(BulkSharedTasksResponse { - tasks: snapshot.tasks, - deleted_task_ids: snapshot.deleted_task_ids, - latest_seq: snapshot.latest_seq, - }), - ) - .into_response(), - Err(error) => match error { - SharedTaskError::Database(err) => { - tracing::error!(?err, "failed to load shared task snapshot"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(json!({ "error": "failed to load shared tasks" })), - ) - .into_response() - } - other => task_error_response(other, "failed to load shared tasks"), - }, - } + let user_repo = UserRepository::new(pool); + let assignees = match user_repo.fetch_assignees_by_project(query.project_id).await { + Ok(names) => names, + Err(e) => { + tracing::error!(?e, "failed to load assignees"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(json!({"error": "failed to load assignees"})), + ) + .into_response(); + } + }; + + (StatusCode::OK, Json(assignees)).into_response() } #[instrument( @@ -185,7 +180,6 @@ pub async fn update_shared_task( title, description, status, - version, } = payload; let next_title = title.as_deref().unwrap_or(existing.title.as_str()); @@ -199,7 +193,6 @@ pub async fn update_shared_task( title, description, status, - version, acting_user_id: ctx.user.id, }; @@ -263,7 +256,6 @@ pub async fn assign_task( let data = AssignTaskData { new_assignee_user_id: payload.new_assignee_user_id, previous_assignee_user_id: Some(ctx.user.id), - version: payload.version, 
}; match repo.assign_task(task_id, data).await { @@ -274,14 +266,13 @@ pub async fn assign_task( #[instrument( name = "tasks.delete_shared_task", - skip(state, ctx, payload), + skip(state, ctx), fields(user_id = %ctx.user.id, task_id = %task_id, org_id = tracing::field::Empty) )] pub async fn delete_shared_task( State(state): State, Extension(ctx): Extension, Path(task_id): Path, - payload: Option>, ) -> Response { let pool = state.pool(); let _organization_id = match ensure_task_access(pool, ctx.user.id, task_id).await { @@ -311,11 +302,8 @@ pub async fn delete_shared_task( ); } - let version = payload.as_ref().and_then(|body| body.0.version); - let data = DeleteTaskData { acting_user_id: ctx.user.id, - version, }; match repo.delete_task(task_id, data).await { @@ -324,11 +312,28 @@ pub async fn delete_shared_task( } } +#[instrument( + name = "tasks.check_existence", + skip(state, ctx, payload), + fields(user_id = %ctx.user.id) +)] +pub async fn check_tasks_existence( + State(state): State, + Extension(ctx): Extension, + Json(payload): Json, +) -> Response { + let pool = state.pool(); + let repo = SharedTaskRepository::new(pool); + + match repo.check_existence(&payload.task_ids, ctx.user.id).await { + Ok(existing_ids) => (StatusCode::OK, Json(existing_ids)).into_response(), + Err(error) => task_error_response(error, "failed to check tasks existence"), + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BulkSharedTasksResponse { - pub tasks: Vec, - pub deleted_task_ids: Vec, - pub latest_seq: Option, +pub struct CheckTasksRequest { + pub task_ids: Vec, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -344,21 +349,15 @@ pub struct UpdateSharedTaskRequest { pub title: Option, pub description: Option, pub status: Option, - pub version: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AssignSharedTaskRequest { pub new_assignee_user_id: Option, - pub version: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DeleteSharedTaskRequest { - pub version: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] pub struct SharedTaskResponse { pub task: SharedTask, pub user: Option, diff --git a/crates/remote/src/state.rs b/crates/remote/src/state.rs index 3f6552f8..516e5f2e 100644 --- a/crates/remote/src/state.rs +++ b/crates/remote/src/state.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use sqlx::PgPool; use crate::{ - activity::ActivityBroker, auth::{JwtService, OAuthHandoffService, OAuthTokenValidator, ProviderRegistry}, config::RemoteServerConfig, mail::Mailer, @@ -12,34 +11,34 @@ use crate::{ #[derive(Clone)] pub struct AppState { pub pool: PgPool, - pub broker: ActivityBroker, pub config: RemoteServerConfig, pub jwt: Arc, pub mailer: Arc, pub server_public_base_url: String, - pub handoff: Arc, - pub oauth_token_validator: Arc, + pub http_client: reqwest::Client, + handoff: Arc, + oauth_token_validator: Arc, } impl AppState { #[allow(clippy::too_many_arguments)] pub fn new( pool: PgPool, - broker: ActivityBroker, config: RemoteServerConfig, jwt: Arc, handoff: Arc, oauth_token_validator: Arc, mailer: Arc, server_public_base_url: String, + http_client: reqwest::Client, ) -> Self { Self { pool, - broker, config, jwt, mailer, server_public_base_url, + http_client, handoff, oauth_token_validator, } @@ -49,10 +48,6 @@ impl AppState { &self.pool } - pub fn broker(&self) -> &ActivityBroker { - &self.broker - } - pub fn config(&self) -> &RemoteServerConfig { &self.config } diff 
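Before the `validated_where.rs` hunk below: the proxy in `electric_proxy.rs` never forwards a client-supplied `table` or `where`; it builds the upstream Shape URL itself and supplies values for `$1`, `$2`, ... as `params[1]`, `params[2]`, ... query parameters, with the organization IDs rendered as a Postgres `{a,b}` array literal. A standalone sketch of that URL construction, assuming only the `url` crate; the host and UUID are placeholders:

use url::Url;

fn main() {
    // Table and WHERE clause are fixed server-side, as in proxy_table;
    // only vetted client params (offset, handle, live, cursor, columns)
    // would be appended on top of this.
    let mut origin = Url::parse("http://electric:3000").unwrap();
    origin.set_path("/v1/shape");
    origin
        .query_pairs_mut()
        .append_pair("table", "shared_tasks")
        .append_pair("where", r#""organization_id" = ANY($1)"#)
        // Positional params use Electric's params[n] convention; the value
        // is a Postgres array literal holding the caller's org IDs.
        .append_pair("params[1]", "{11111111-1111-1111-1111-111111111111}")
        .append_pair("offset", "-1");
    println!("{origin}");
}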
--git a/crates/remote/src/validated_where.rs b/crates/remote/src/validated_where.rs new file mode 100644 index 00000000..0db3285b --- /dev/null +++ b/crates/remote/src/validated_where.rs @@ -0,0 +1,20 @@ +#[derive(Debug)] +pub struct ValidatedWhere { + pub table: &'static str, + pub where_clause: &'static str, +} +#[macro_export] +macro_rules! validated_where { + ($table:literal, $where:literal $(, $arg:expr)* $(,)?) => {{ + // Compile-time validation via SQLx using + concatenation + // This checks: table exists, columns exist, arg types are correct + let _ = sqlx::query!( + "SELECT 1 AS v FROM " + $table + " WHERE " + $where + $(, $arg)* + ); + $crate::validated_where::ValidatedWhere { + table: $table, + where_clause: $where, + } + }}; +} diff --git a/crates/remote/src/ws/message.rs b/crates/remote/src/ws/message.rs deleted file mode 100644 index 44c16276..00000000 --- a/crates/remote/src/ws/message.rs +++ /dev/null @@ -1,21 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::activity::ActivityEvent; - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "type", content = "data")] -pub enum ClientMessage { - #[serde(rename = "ack")] - Ack { cursor: i64 }, - #[serde(rename = "auth-token")] - AuthToken { token: String }, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "type", content = "data")] -pub enum ServerMessage { - #[serde(rename = "activity")] - Activity(ActivityEvent), - #[serde(rename = "error")] - Error { message: String }, -} diff --git a/crates/remote/src/ws/mod.rs b/crates/remote/src/ws/mod.rs deleted file mode 100644 index 7667f0a4..00000000 --- a/crates/remote/src/ws/mod.rs +++ /dev/null @@ -1,41 +0,0 @@ -use axum::{ - Router, - extract::{Extension, Query, State, ws::WebSocketUpgrade}, - response::IntoResponse, - routing::get, -}; -use serde::Deserialize; -use uuid::Uuid; - -use crate::{AppState, auth::RequestContext}; - -pub mod message; -mod session; - -#[derive(Debug, Deserialize, Clone)] -pub struct WsQueryParams { - pub project_id: Uuid, - pub cursor: Option, -} - -pub fn router() -> Router { - Router::new().route("/ws", get(upgrade)) -} - -async fn upgrade( - ws: WebSocketUpgrade, - State(state): State, - Extension(ctx): Extension, - Query(params): Query, -) -> impl IntoResponse { - match crate::routes::organization_members::ensure_project_access( - state.pool(), - ctx.user.id, - params.project_id, - ) - .await - { - Ok(_) => ws.on_upgrade(move |socket| session::handle(socket, state, ctx, params)), - Err(error) => error.into_response(), - } -} diff --git a/crates/remote/src/ws/session.rs b/crates/remote/src/ws/session.rs deleted file mode 100644 index e5ec3a60..00000000 --- a/crates/remote/src/ws/session.rs +++ /dev/null @@ -1,512 +0,0 @@ -use std::sync::Arc; - -use axum::extract::ws::{Message, WebSocket}; -use chrono::{DateTime, Duration as ChronoDuration, Utc}; -use futures::{SinkExt, StreamExt}; -use sqlx::PgPool; -use thiserror::Error; -use tokio::time::{self, MissedTickBehavior}; -use tokio_stream::wrappers::errors::BroadcastStreamRecvError; -use tracing::{Span, instrument}; -use utils::ws::{WS_AUTH_REFRESH_INTERVAL, WS_BULK_SYNC_THRESHOLD, WS_TOKEN_EXPIRY_GRACE}; -use uuid::Uuid; - -use super::{ - WsQueryParams, - message::{ClientMessage, ServerMessage}, -}; -use crate::{ - AppState, - activity::{ActivityBroker, ActivityEvent, ActivityStream}, - auth::{JwtError, JwtService, RequestContext}, - db::{ - activity::ActivityRepository, - auth::{AuthSessionError, AuthSessionRepository}, - }, -}; - -#[instrument( - name = "ws.session", - 
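Stepping back to the new `validated_where!` macro above: sqlx's `query!` macro accepts `+`-concatenation of string literals, so the throwaway `SELECT 1 AS v` query forces the table name, columns, and argument types to be checked against the schema at compile time, while only the raw fragments are handed back for runtime assembly. A hypothetical call site (table and column chosen purely for illustration):

```rust
// Compiles only if `tasks` has an `assignee_user_id` column whose type
// matches `user_id`; at runtime the result is just two &'static str fragments.
let clause = validated_where!("tasks", "assignee_user_id = $1", user_id);
assert_eq!(clause.table, "tasks");
assert_eq!(clause.where_clause, "assignee_user_id = $1");
```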
skip(socket, state, ctx, params), - fields( - user_id = %ctx.user.id, - project_id = %params.project_id, - org_id = tracing::field::Empty, - session_id = %ctx.session_id - ) -)] -pub async fn handle( - socket: WebSocket, - state: AppState, - ctx: RequestContext, - params: WsQueryParams, -) { - let config = state.config(); - let pool_ref = state.pool(); - let project_id = params.project_id; - let organization_id = match crate::routes::organization_members::ensure_project_access( - pool_ref, - ctx.user.id, - project_id, - ) - .await - { - Ok(org_id) => org_id, - Err(error) => { - tracing::info!( - ?error, - user_id = %ctx.user.id, - %project_id, - "websocket project access denied" - ); - return; - } - }; - Span::current().record("org_id", format_args!("{organization_id}")); - - let pool = pool_ref.clone(); - let mut last_sent_seq = params.cursor; - let mut auth_state = WsAuthState::new( - state.jwt(), - pool.clone(), - ctx.session_id, - ctx.user.id, - project_id, - ctx.access_token_expires_at, - ); - let mut auth_check_interval = time::interval(WS_AUTH_REFRESH_INTERVAL); - auth_check_interval.set_missed_tick_behavior(MissedTickBehavior::Skip); - - let (mut sender, mut inbound) = socket.split(); - let mut activity_stream = state.broker().subscribe(project_id); - - if let Ok(history) = ActivityRepository::new(&pool) - .fetch_since(project_id, params.cursor, config.activity_default_limit) - .await - { - for event in history { - if send_activity(&mut sender, &event).await.is_err() { - return; - } - last_sent_seq = Some(event.seq); - } - } - - tracing::debug!(org_id = %organization_id, project_id = %project_id, "starting websocket session"); - - loop { - tokio::select! { - maybe_activity = activity_stream.next() => { - match maybe_activity { - Some(Ok(event)) => { - tracing::trace!(?event, "received activity event"); - assert_eq!(event.project_id, project_id, "activity stream emitted cross-project event"); - if let Some(prev_seq) = last_sent_seq { - if prev_seq >= event.seq { - continue; - } - if event.seq > prev_seq + 1 { - tracing::warn!( - expected_next = prev_seq + 1, - actual = event.seq, - org_id = %organization_id, - project_id = %project_id, - "activity stream skipped sequence; running catch-up" - ); - match activity_stream_catch_up( - &mut sender, - &pool, - project_id, - organization_id, - prev_seq, - state.broker(), - config.activity_catchup_batch_size, - WS_BULK_SYNC_THRESHOLD as i64, - "gap", - ).await { - Ok((seq, stream)) => { - last_sent_seq = Some(seq); - activity_stream = stream; - } - Err(()) => break, - } - continue; - } - } - if send_activity(&mut sender, &event).await.is_err() { - break; - } - last_sent_seq = Some(event.seq); - } - Some(Err(BroadcastStreamRecvError::Lagged(skipped))) => { - tracing::warn!(skipped, org_id = %organization_id, project_id = %project_id, "activity stream lagged"); - let Some(prev_seq) = last_sent_seq else { - tracing::info!( - org_id = %organization_id, - project_id = %project_id, - "activity stream lagged without baseline; forcing bulk sync" - ); - let _ = send_error(&mut sender, "activity backlog dropped").await; - break; - }; - - match activity_stream_catch_up( - &mut sender, - &pool, - project_id, - organization_id, - prev_seq, - state.broker(), - config.activity_catchup_batch_size, - WS_BULK_SYNC_THRESHOLD as i64, - "lag", - ).await { - Ok((seq, stream)) => { - last_sent_seq = Some(seq); - activity_stream = stream; - } - Err(()) => break, - } - } - None => break, - } - } - - maybe_message = inbound.next() => { - match maybe_message { - 
Some(Ok(msg)) => { - if matches!(msg, Message::Close(_)) { - break; - } - if let Message::Text(text) = msg { - match serde_json::from_str::(&text) { - Ok(ClientMessage::Ack { .. }) => {} - Ok(ClientMessage::AuthToken { token }) => { - auth_state.store_token(token); - } - Err(error) => { - tracing::debug!(?error, "invalid inbound message"); - } - } - } - } - Some(Err(error)) => { - tracing::debug!(?error, "websocket receive error"); - break; - } - None => break, - } - } - - _ = auth_check_interval.tick() => { - match auth_state.verify().await { - Ok(()) => {} - Err(error) => { - tracing::info!(?error, "closing websocket due to auth verification error"); - let message = match error { - AuthVerifyError::Revoked => "authorization revoked", - AuthVerifyError::MembershipRevoked => "project access revoked", - AuthVerifyError::Expired => "authorization expired", - AuthVerifyError::UserMismatch { .. } - | AuthVerifyError::Decode(_) - | AuthVerifyError::Session(_) => "authorization error", - }; - let _ = send_error(&mut sender, message).await; - let _ = sender.send(Message::Close(None)).await; - break; - } - } - } - } - } -} - -async fn send_activity( - sender: &mut futures::stream::SplitSink, - event: &ActivityEvent, -) -> Result<(), ()> { - tracing::trace!( - event_type = %event.event_type.as_str(), - project_id = %event.project_id, - "sending activity event" - ); - - match serde_json::to_string(&ServerMessage::Activity(event.clone())) { - Ok(json) => sender - .send(Message::Text(json.into())) - .await - .map_err(|error| { - tracing::debug!(?error, "failed to send activity message"); - }), - Err(error) => { - tracing::error!(?error, "failed to serialise activity event"); - Err(()) - } - } -} - -async fn send_error( - sender: &mut futures::stream::SplitSink, - message: &str, -) -> Result<(), ()> { - match serde_json::to_string(&ServerMessage::Error { - message: message.to_string(), - }) { - Ok(json) => sender - .send(Message::Text(json.into())) - .await - .map_err(|error| { - tracing::debug!(?error, "failed to send websocket error message"); - }), - Err(error) => { - tracing::error!(?error, "failed to serialise websocket error message"); - Err(()) - } - } -} - -struct WsAuthState { - jwt: Arc, - pool: PgPool, - session_id: Uuid, - expected_user_id: Uuid, - project_id: Uuid, - token_expires_at: DateTime, - new_access_token: Option, -} - -impl WsAuthState { - fn new( - jwt: Arc, - pool: PgPool, - session_id: Uuid, - expected_user_id: Uuid, - project_id: Uuid, - token_expires_at: DateTime, - ) -> Self { - Self { - jwt, - pool, - session_id, - expected_user_id, - project_id, - new_access_token: None, - token_expires_at, - } - } - - fn store_token(&mut self, token: String) { - self.new_access_token = Some(token); - } - - async fn verify(&mut self) -> Result<(), AuthVerifyError> { - if let Some(token) = self.new_access_token.take() { - let token_details = self - .jwt - .decode_access_token_with_leeway(&token, WS_TOKEN_EXPIRY_GRACE.as_secs()) - .map_err(AuthVerifyError::Decode)?; - self.apply_identity(token_details.user_id, token_details.session_id) - .await?; - self.token_expires_at = token_details.expires_at; - } - - self.validate_token_expiry()?; - self.validate_session().await?; - self.validate_membership().await - } - - async fn apply_identity( - &mut self, - user_id: Uuid, - session_id: Uuid, - ) -> Result<(), AuthVerifyError> { - if user_id != self.expected_user_id { - return Err(AuthVerifyError::UserMismatch { - expected: self.expected_user_id, - received: user_id, - }); - } - - self.session_id = 
session_id; - self.validate_session().await - } - - fn validate_token_expiry(&self) -> Result<(), AuthVerifyError> { - if self.token_expires_at + ws_leeway_duration() > Utc::now() { - return Ok(()); - } - Err(AuthVerifyError::Expired) - } - - async fn validate_session(&self) -> Result<(), AuthVerifyError> { - let repo = AuthSessionRepository::new(&self.pool); - let session = repo - .get(self.session_id) - .await - .map_err(AuthVerifyError::Session)?; - - if session.revoked_at.is_some() { - return Err(AuthVerifyError::Revoked); - } - - Ok(()) - } - - async fn validate_membership(&self) -> Result<(), AuthVerifyError> { - crate::routes::organization_members::ensure_project_access( - &self.pool, - self.expected_user_id, - self.project_id, - ) - .await - .map(|_| ()) - .map_err(|error| { - tracing::warn!( - ?error, - user_id = %self.expected_user_id, - project_id = %self.project_id, - "websocket membership validation failed" - ); - AuthVerifyError::MembershipRevoked - }) - } -} - -fn ws_leeway_duration() -> ChronoDuration { - ChronoDuration::from_std(WS_TOKEN_EXPIRY_GRACE).unwrap() -} - -#[derive(Debug, Error)] -enum AuthVerifyError { - #[error(transparent)] - Decode(#[from] JwtError), - #[error("received token for unexpected user: expected {expected}, received {received}")] - UserMismatch { expected: Uuid, received: Uuid }, - #[error(transparent)] - Session(#[from] AuthSessionError), - #[error("session revoked")] - Revoked, - #[error("organization membership revoked")] - MembershipRevoked, - #[error("access token expired")] - Expired, -} - -#[allow(clippy::too_many_arguments)] -async fn activity_stream_catch_up( - sender: &mut futures::stream::SplitSink, - pool: &PgPool, - project_id: Uuid, - organization_id: Uuid, - last_seq: i64, - broker: &ActivityBroker, - batch_size: i64, - bulk_limit: i64, - reason: &'static str, -) -> Result<(i64, ActivityStream), ()> { - let mut activity_stream = broker.subscribe(project_id); - - let event = match activity_stream.next().await { - Some(Ok(event)) => event, - Some(Err(_)) | None => { - let _ = send_error(sender, "activity backlog dropped").await; - return Err(()); - } - }; - let target_seq = event.seq; - - if target_seq <= last_seq { - return Ok((last_seq, activity_stream)); - } - - let bulk_limit = bulk_limit.max(1); - let diff = target_seq - last_seq; - if diff > bulk_limit { - tracing::info!( - org_id = %organization_id, - project_id = %project_id, - threshold = bulk_limit, - reason, - "activity catch up exceeded threshold; forcing bulk sync" - ); - let _ = send_error(sender, "activity backlog dropped").await; - return Err(()); - } - - let catch_up_result = catch_up_from_db( - sender, - pool, - project_id, - organization_id, - last_seq, - target_seq, - batch_size.max(1), - ) - .await; - - match catch_up_result { - Ok(seq) => Ok((seq, activity_stream)), - Err(CatchUpError::Stale) => { - let _ = send_error(sender, "activity backlog dropped").await; - Err(()) - } - Err(CatchUpError::Send) => Err(()), - } -} - -#[derive(Debug, Error)] -enum CatchUpError { - #[error("activity stream went stale during catch up")] - Stale, - #[error("failed to send activity event")] - Send, -} - -async fn catch_up_from_db( - sender: &mut futures::stream::SplitSink, - pool: &PgPool, - project_id: Uuid, - organization_id: Uuid, - last_seq: i64, - target_seq: i64, - batch_size: i64, -) -> Result { - let repository = ActivityRepository::new(pool); - let mut current_seq = last_seq; - let mut cursor = last_seq; - - loop { - let events = repository - .fetch_since(project_id, 
Some(cursor), batch_size) - .await - .map_err(|error| { - tracing::error!(?error, org_id = %organization_id, project_id = %project_id, "failed to fetch activity catch up"); - CatchUpError::Stale - })?; - - if events.is_empty() { - tracing::warn!(org_id = %organization_id, project_id = %project_id, "activity catch up returned no events"); - return Err(CatchUpError::Stale); - } - - for event in events { - if event.seq <= current_seq { - continue; - } - if event.seq > target_seq { - return Ok(current_seq); - } - if send_activity(sender, &event).await.is_err() { - return Err(CatchUpError::Send); - } - current_seq = event.seq; - cursor = event.seq; - } - - if current_seq >= target_seq { - break; - } - } - - Ok(current_seq) -} diff --git a/crates/server/Cargo.toml b/crates/server/Cargo.toml index de1426d6..9f76b2b0 100644 --- a/crates/server/Cargo.toml +++ b/crates/server/Cargo.toml @@ -11,6 +11,7 @@ uninlined-format-args = "allow" deployment = { path = "../deployment" } executors = { path = "../executors" } local-deployment = { path = "../local-deployment" } +remote = { path = "../remote" } utils = { path = "../utils" } db = { path = "../db" } services = { path = "../services" } diff --git a/crates/server/src/bin/generate_types.rs b/crates/server/src/bin/generate_types.rs index dc63e5cf..e3648dac 100644 --- a/crates/server/src/bin/generate_types.rs +++ b/crates/server/src/bin/generate_types.rs @@ -10,44 +10,46 @@ fn generate_types_content() -> String { // If you are an AI, and you absolutely have to edit this file, please confirm with the user first."; let decls: Vec = vec![ - services::services::filesystem::DirectoryEntry::decl(), - services::services::filesystem::DirectoryListResponse::decl(), + remote::routes::tasks::SharedTaskResponse::decl(), + remote::routes::tasks::AssigneesQuery::decl(), + remote::db::tasks::SharedTask::decl(), + remote::db::users::UserData::decl(), db::models::project::Project::decl(), db::models::project::CreateProject::decl(), db::models::project::UpdateProject::decl(), db::models::project::SearchResult::decl(), db::models::project::SearchMatchType::decl(), - server::routes::projects::CreateRemoteProjectRequest::decl(), - server::routes::projects::LinkToExistingRequest::decl(), - executors::actions::ExecutorAction::decl(), - executors::mcp_config::McpConfig::decl(), - executors::actions::ExecutorActionType::decl(), - executors::actions::script::ScriptContext::decl(), - executors::actions::script::ScriptRequest::decl(), - executors::actions::script::ScriptRequestLanguage::decl(), - executors::executors::BaseCodingAgent::decl(), - executors::executors::CodingAgent::decl(), db::models::tag::Tag::decl(), db::models::tag::CreateTag::decl(), db::models::tag::UpdateTag::decl(), - server::routes::tags::TagSearchParams::decl(), db::models::task::TaskStatus::decl(), db::models::task::Task::decl(), db::models::task::TaskWithAttemptStatus::decl(), db::models::task::TaskRelationships::decl(), db::models::task::CreateTask::decl(), db::models::task::UpdateTask::decl(), - db::models::shared_task::SharedTask::decl(), db::models::scratch::DraftFollowUpData::decl(), db::models::scratch::ScratchPayload::decl(), db::models::scratch::ScratchType::decl(), db::models::scratch::Scratch::decl(), db::models::scratch::CreateScratch::decl(), db::models::scratch::UpdateScratch::decl(), - services::services::queued_message::QueuedMessage::decl(), - services::services::queued_message::QueueStatus::decl(), db::models::image::Image::decl(), db::models::image::CreateImage::decl(), + 
db::models::task_attempt::TaskAttempt::decl(), + db::models::execution_process::ExecutionProcess::decl(), + db::models::execution_process::ExecutionProcessStatus::decl(), + db::models::execution_process::ExecutionProcessRunReason::decl(), + db::models::merge::Merge::decl(), + db::models::merge::DirectMerge::decl(), + db::models::merge::PrMerge::decl(), + db::models::merge::MergeStatus::decl(), + db::models::merge::PullRequestInfo::decl(), + utils::approvals::ApprovalStatus::decl(), + utils::approvals::CreateApprovalRequest::decl(), + utils::approvals::ApprovalResponse::decl(), + utils::diff::Diff::decl(), + utils::diff::DiffChangeKind::decl(), utils::response::ApiResponse::<()>::decl(), utils::api::oauth::LoginStatus::decl(), utils::api::oauth::ProfileResponse::decl(), @@ -77,6 +79,10 @@ fn generate_types_content() -> String { utils::api::projects::RemoteProject::decl(), utils::api::projects::ListProjectsResponse::decl(), utils::api::projects::RemoteProjectMembersResponse::decl(), + server::routes::projects::CreateRemoteProjectRequest::decl(), + server::routes::projects::LinkToExistingRequest::decl(), + server::routes::tags::TagSearchParams::decl(), + server::routes::oauth::TokenResponse::decl(), server::routes::config::UserSystemInfo::decl(), server::routes::config::Environment::decl(), server::routes::config::McpServerQuery::decl(), @@ -85,7 +91,7 @@ fn generate_types_content() -> String { server::routes::config::CheckEditorAvailabilityQuery::decl(), server::routes::config::CheckEditorAvailabilityResponse::decl(), server::routes::config::CheckAgentAvailabilityQuery::decl(), - executors::executors::AvailabilityInfo::decl(), + server::routes::oauth::CurrentUserResponse::decl(), server::routes::task_attempts::CreateFollowUpAttempt::decl(), server::routes::task_attempts::ChangeTargetBranchRequest::decl(), server::routes::task_attempts::ChangeTargetBranchResponse::decl(), @@ -95,12 +101,22 @@ fn generate_types_content() -> String { server::routes::task_attempts::OpenEditorRequest::decl(), server::routes::task_attempts::OpenEditorResponse::decl(), server::routes::shared_tasks::AssignSharedTaskRequest::decl(), - server::routes::shared_tasks::AssignSharedTaskResponse::decl(), server::routes::tasks::ShareTaskResponse::decl(), server::routes::tasks::CreateAndStartTaskRequest::decl(), server::routes::task_attempts::CreateGitHubPrRequest::decl(), server::routes::images::ImageResponse::decl(), server::routes::images::ImageMetadata::decl(), + server::routes::task_attempts::CreateTaskAttemptBody::decl(), + server::routes::task_attempts::RunAgentSetupRequest::decl(), + server::routes::task_attempts::RunAgentSetupResponse::decl(), + server::routes::task_attempts::gh_cli_setup::GhCliSetupError::decl(), + server::routes::task_attempts::RebaseTaskAttemptRequest::decl(), + server::routes::task_attempts::GitOperationError::decl(), + server::routes::task_attempts::PushError::decl(), + server::routes::task_attempts::CreatePrError::decl(), + server::routes::task_attempts::BranchStatus::decl(), + services::services::filesystem::DirectoryEntry::decl(), + services::services::filesystem::DirectoryListResponse::decl(), services::services::config::Config::decl(), services::services::config::NotificationConfig::decl(), services::services::config::ThemeMode::decl(), @@ -112,8 +128,19 @@ fn generate_types_content() -> String { services::services::config::UiLanguage::decl(), services::services::config::ShowcaseState::decl(), services::services::git::GitBranch::decl(), - utils::diff::Diff::decl(), - 
utils::diff::DiffChangeKind::decl(), + services::services::share::SharedTaskDetails::decl(), + services::services::queued_message::QueuedMessage::decl(), + services::services::queued_message::QueueStatus::decl(), + services::services::git::ConflictOp::decl(), + executors::actions::ExecutorAction::decl(), + executors::mcp_config::McpConfig::decl(), + executors::actions::ExecutorActionType::decl(), + executors::actions::script::ScriptContext::decl(), + executors::actions::script::ScriptRequest::decl(), + executors::actions::script::ScriptRequestLanguage::decl(), + executors::executors::BaseCodingAgent::decl(), + executors::executors::CodingAgent::decl(), + executors::executors::AvailabilityInfo::decl(), executors::command::CommandBuilder::decl(), executors::profile::ExecutorProfileId::decl(), executors::profile::ExecutorConfig::decl(), @@ -138,25 +165,6 @@ fn generate_types_content() -> String { executors::executors::AppendPrompt::decl(), executors::actions::coding_agent_initial::CodingAgentInitialRequest::decl(), executors::actions::coding_agent_follow_up::CodingAgentFollowUpRequest::decl(), - server::routes::task_attempts::CreateTaskAttemptBody::decl(), - server::routes::task_attempts::RunAgentSetupRequest::decl(), - server::routes::task_attempts::RunAgentSetupResponse::decl(), - server::routes::task_attempts::gh_cli_setup::GhCliSetupError::decl(), - server::routes::task_attempts::RebaseTaskAttemptRequest::decl(), - server::routes::task_attempts::GitOperationError::decl(), - server::routes::task_attempts::PushError::decl(), - server::routes::task_attempts::CreatePrError::decl(), - server::routes::task_attempts::BranchStatus::decl(), - services::services::git::ConflictOp::decl(), - db::models::task_attempt::TaskAttempt::decl(), - db::models::execution_process::ExecutionProcess::decl(), - db::models::execution_process::ExecutionProcessStatus::decl(), - db::models::execution_process::ExecutionProcessRunReason::decl(), - db::models::merge::Merge::decl(), - db::models::merge::DirectMerge::decl(), - db::models::merge::PrMerge::decl(), - db::models::merge::MergeStatus::decl(), - db::models::merge::PullRequestInfo::decl(), executors::logs::CommandExitStatus::decl(), executors::logs::CommandRunResult::decl(), executors::logs::NormalizedEntry::decl(), @@ -169,9 +177,6 @@ fn generate_types_content() -> String { executors::logs::ToolResultValueType::decl(), executors::logs::ToolStatus::decl(), executors::logs::utils::patch::PatchType::decl(), - utils::approvals::ApprovalStatus::decl(), - utils::approvals::CreateApprovalRequest::decl(), - utils::approvals::ApprovalResponse::decl(), serde_json::Value::decl(), ]; diff --git a/crates/server/src/error.rs b/crates/server/src/error.rs index a5a234c4..ccbead99 100644 --- a/crates/server/src/error.rs +++ b/crates/server/src/error.rs @@ -286,10 +286,6 @@ impl From for ApiError { tracing::error!(?err, "share task URL error"); ApiError::Conflict("Share service URL is invalid".to_string()) } - ShareError::WebSocket(err) => { - tracing::error!(?err, "share task websocket error"); - ApiError::Conflict("Unexpected websocket error during sharing".to_string()) - } ShareError::InvalidResponse => ApiError::Conflict( "Remote share service returned an unexpected response".to_string(), ), diff --git a/crates/server/src/main.rs b/crates/server/src/main.rs index c608a3a9..84bb2bbc 100644 --- a/crates/server/src/main.rs +++ b/crates/server/src/main.rs @@ -73,6 +73,16 @@ async fn main() -> Result<(), VibeKanbanError> { } }); + // Verify shared tasks in background + let 
deployment_for_verification = deployment.clone(); + tokio::spawn(async move { + if let Some(publisher) = deployment_for_verification.container().share_publisher() + && let Err(e) = publisher.cleanup_shared_tasks().await + { + tracing::warn!("Failed to verify shared tasks: {}", e); + } + }); + let app_router = routes::router(deployment.clone()); let port = std::env::var("BACKEND_PORT") diff --git a/crates/server/src/mcp/task_server.rs b/crates/server/src/mcp/task_server.rs index ad18faf7..4d791346 100644 --- a/crates/server/src/mcp/task_server.rs +++ b/crates/server/src/mcp/task_server.rs @@ -468,7 +468,7 @@ impl TaskServer { Ok(s) => Some(s), Err(_) => { return Self::err( - "Invalid status filter. Valid values: 'todo', 'in-progress', 'in-review', 'done', 'cancelled'".to_string(), + "Invalid status filter. Valid values: 'todo', 'inprogress', 'inreview', 'done', 'cancelled'".to_string(), Some(status_str.to_string()), ); } @@ -595,7 +595,7 @@ impl TaskServer { Ok(s) => Some(s), Err(_) => { return Self::err( - "Invalid status filter. Valid values: 'todo', 'in-progress', 'in-review', 'done', 'cancelled'".to_string(), + "Invalid status filter. Valid values: 'todo', 'inprogress', 'inreview', 'done', 'cancelled'".to_string(), Some(status_str.to_string()), ); } diff --git a/crates/server/src/routes/oauth.rs b/crates/server/src/routes/oauth.rs index 04612296..bd48c3f0 100644 --- a/crates/server/src/routes/oauth.rs +++ b/crates/server/src/routes/oauth.rs @@ -5,11 +5,14 @@ use axum::{ response::Json as ResponseJson, routing::{get, post}, }; +use chrono::{DateTime, Utc}; use deployment::Deployment; use rand::{Rng, distributions::Alphanumeric}; use serde::{Deserialize, Serialize}; use services::services::{config::save_config_to_file, oauth_credentials::Credentials}; use sha2::{Digest, Sha256}; +use tokio; +use ts_rs::TS; use utils::{ api::oauth::{HandoffInitRequest, HandoffRedeemRequest, StatusResponse}, assets::config_path, @@ -20,12 +23,29 @@ use uuid::Uuid; use crate::{DeploymentImpl, error::ApiError}; +/// Response from GET /api/auth/token - returns the current access token +#[derive(Debug, Serialize, TS)] +#[ts(export)] +pub struct TokenResponse { + pub access_token: String, + pub expires_at: Option>, +} + +/// Response from GET /api/auth/user - returns the current user ID +#[derive(Debug, Serialize, TS)] +#[ts(export)] +pub struct CurrentUserResponse { + pub user_id: String, +} + pub fn router() -> Router { Router::new() .route("/auth/handoff/init", post(handoff_init)) .route("/auth/handoff/complete", get(handoff_complete)) .route("/auth/logout", post(logout)) .route("/auth/status", get(status)) + .route("/auth/token", get(get_token)) + .route("/auth/user", get(get_current_user)) } #[derive(Debug, Deserialize)] @@ -188,22 +208,13 @@ async fn handoff_complete( ); } - // Start remote sync if not already running - { - let handle_guard = deployment.share_sync_handle().lock().await; - let should_start = handle_guard.is_none(); - drop(handle_guard); - - if should_start { - if let Some(share_config) = deployment.share_config() { - tracing::info!("Starting remote sync after login"); - deployment.spawn_remote_sync(share_config.clone()); - } else { - tracing::debug!( - "Share config not available; skipping remote sync spawn after login" - ); + // Trigger shared task cleanup in background + if let Ok(publisher) = deployment.share_publisher() { + tokio::spawn(async move { + if let Err(e) = publisher.cleanup_shared_tasks().await { + tracing::error!("Failed to cleanup shared tasks on login: {}", e); } - } + }); 
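The `/auth/user` route registered above is answered from the JWT itself rather than a network round-trip (see `get_current_user` just below, which reads the token's `sub` claim via `utils::jwt::extract_subject`). A minimal sketch of that style of extraction, assuming an unverified decode is acceptable because the token was just minted or refreshed locally; the real `extract_subject` may differ:

```rust
use base64::{Engine as _, engine::general_purpose::URL_SAFE_NO_PAD};
use serde::Deserialize;

#[derive(Deserialize)]
struct Claims {
    sub: String,
}

// Split header.payload.signature, base64url-decode the payload, read `sub`.
// No signature verification: illustration only, for locally issued tokens.
fn extract_subject_unverified(token: &str) -> Option<String> {
    let payload = token.split('.').nth(1)?;
    let bytes = URL_SAFE_NO_PAD.decode(payload).ok()?;
    serde_json::from_slice::<Claims>(&bytes).ok().map(|c| c.sub)
}
```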
} Ok(close_window_response(format!( @@ -212,12 +223,6 @@ async fn handoff_complete( } async fn logout(State(deployment): State) -> Result { - // Stop remote sync if running - if let Some(handle) = deployment.share_sync_handle().lock().await.take() { - tracing::info!("Stopping remote sync due to logout"); - handle.shutdown().await; - } - let auth_context = deployment.auth_context(); if let Ok(client) = deployment.remote_client() { @@ -255,6 +260,51 @@ async fn status( } } +/// Returns the current access token (auto-refreshes if needed) +async fn get_token( + State(deployment): State, +) -> Result>, ApiError> { + let remote_client = deployment.remote_client()?; + + // This will auto-refresh the token if expired + let access_token = remote_client + .access_token() + .await + .map_err(|_| ApiError::Unauthorized)?; + + let creds = deployment.auth_context().get_credentials().await; + let expires_at = creds.and_then(|c| c.expires_at); + + Ok(ResponseJson(ApiResponse::success(TokenResponse { + access_token, + expires_at, + }))) +} + +async fn get_current_user( + State(deployment): State, +) -> Result>, ApiError> { + let remote_client = deployment.remote_client()?; + + // Get the access token from remote client + let access_token = remote_client + .access_token() + .await + .map_err(|_| ApiError::Unauthorized)?; + + // Extract user ID from the JWT token's 'sub' claim + let user_id = utils::jwt::extract_subject(&access_token) + .map_err(|e| { + tracing::error!("Failed to extract user ID from token: {}", e); + ApiError::Unauthorized + })? + .to_string(); + + Ok(ResponseJson(ApiResponse::success(CurrentUserResponse { + user_id, + }))) +} + fn generate_secret() -> String { rand::thread_rng() .sample_iter(&Alphanumeric) diff --git a/crates/server/src/routes/projects.rs b/crates/server/src/routes/projects.rs index e87573d8..6096ba0a 100644 --- a/crates/server/src/routes/projects.rs +++ b/crates/server/src/routes/projects.rs @@ -20,7 +20,6 @@ use services::services::{ file_search_cache::{CacheError, SearchMode, SearchQuery}, git::GitBranch, remote_client::CreateRemoteProjectPayload, - share::link_shared_tasks_to_project, }; use ts_rs::TS; use utils::{ @@ -65,7 +64,7 @@ pub async fn get_project_branches( } pub async fn link_project_to_existing_remote( - Path(project_id): Path, + Extension(project): Extension, State(deployment): State, Json(payload): Json, ) -> Result>, ApiError> { @@ -73,14 +72,13 @@ pub async fn link_project_to_existing_remote( let remote_project = client.get_project(payload.remote_project_id).await?; - let updated_project = - apply_remote_project_link(&deployment, project_id, remote_project).await?; + let updated_project = apply_remote_project_link(&deployment, project, remote_project).await?; Ok(ResponseJson(ApiResponse::success(updated_project))) } pub async fn create_and_link_remote_project( - Path(project_id): Path, + Extension(project): Extension, State(deployment): State, Json(payload): Json, ) -> Result>, ApiError> { @@ -101,8 +99,7 @@ pub async fn create_and_link_remote_project( }) .await?; - let updated_project = - apply_remote_project_link(&deployment, project_id, remote_project).await?; + let updated_project = apply_remote_project_link(&deployment, project, remote_project).await?; Ok(ResponseJson(ApiResponse::success(updated_project))) } @@ -167,26 +164,28 @@ pub async fn get_project_remote_members( async fn apply_remote_project_link( deployment: &DeploymentImpl, - project_id: Uuid, + project: Project, remote_project: RemoteProject, ) -> Result { let pool = 
&deployment.db().pool; - Project::set_remote_project_id(pool, project_id, Some(remote_project.id)).await?; + if project.remote_project_id.is_some() { + return Err(ApiError::Conflict( + "Project is already linked to a remote project. Unlink it first.".to_string(), + )); + } - let updated_project = Project::find_by_id(pool, project_id) + Project::set_remote_project_id(pool, project.id, Some(remote_project.id)).await?; + + let updated_project = Project::find_by_id(pool, project.id) .await? .ok_or(ProjectError::ProjectNotFound)?; - let current_profile = deployment.auth_context().cached_profile().await; - let current_user_id = current_profile.as_ref().map(|p| p.user_id); - link_shared_tasks_to_project(pool, current_user_id, project_id, remote_project.id).await?; - deployment .track_if_analytics_allowed( "project_linked_to_remote", serde_json::json!({ - "project_id": project_id.to_string(), + "project_id": project.id.to_string(), }), ) .await; diff --git a/crates/server/src/routes/shared_tasks.rs b/crates/server/src/routes/shared_tasks.rs index b0dd9afd..6e1177d6 100644 --- a/crates/server/src/routes/shared_tasks.rs +++ b/crates/server/src/routes/shared_tasks.rs @@ -4,10 +4,11 @@ use axum::{ response::Json as ResponseJson, routing::{delete, post}, }; -use db::models::shared_task::SharedTask; +use db::models::task::Task; use deployment::Deployment; -use serde::{Deserialize, Serialize}; -use services::services::share::ShareError; +use remote::routes::tasks::SharedTaskResponse; +use serde::Deserialize; +use services::services::share::{ShareError, SharedTaskDetails}; use ts_rs::TS; use utils::response::ApiResponse; use uuid::Uuid; @@ -18,13 +19,6 @@ use crate::{DeploymentImpl, error::ApiError}; #[ts(export)] pub struct AssignSharedTaskRequest { pub new_assignee_user_id: Option, - pub version: Option, -} - -#[derive(Debug, Clone, Serialize, TS)] -#[ts(export)] -pub struct AssignSharedTaskResponse { - pub shared_task: SharedTask, } pub fn router() -> Router { @@ -34,27 +28,23 @@ pub fn router() -> Router { post(assign_shared_task), ) .route("/shared-tasks/{shared_task_id}", delete(delete_shared_task)) + .route( + "/shared-tasks/link-to-local", + post(link_shared_task_to_local), + ) } pub async fn assign_shared_task( Path(shared_task_id): Path, State(deployment): State, Json(payload): Json, -) -> Result>, ApiError> { +) -> Result>, ApiError> { let Ok(publisher) = deployment.share_publisher() else { return Err(ShareError::MissingConfig("share publisher unavailable").into()); }; - let shared_task = SharedTask::find_by_id(&deployment.db().pool, shared_task_id) - .await? 
- .ok_or_else(|| ApiError::Conflict("shared task not found".into()))?; - let updated_shared_task = publisher - .assign_shared_task( - &shared_task, - payload.new_assignee_user_id.clone(), - payload.version, - ) + .assign_shared_task(shared_task_id, payload.new_assignee_user_id.clone()) .await?; let props = serde_json::json!({ @@ -65,11 +55,7 @@ pub async fn assign_shared_task( .track_if_analytics_allowed("reassign_shared_task", props) .await; - Ok(ResponseJson(ApiResponse::success( - AssignSharedTaskResponse { - shared_task: updated_shared_task, - }, - ))) + Ok(ResponseJson(ApiResponse::success(updated_shared_task))) } pub async fn delete_shared_task( @@ -91,3 +77,27 @@ pub async fn delete_shared_task( Ok(ResponseJson(ApiResponse::success(()))) } + +pub async fn link_shared_task_to_local( + State(deployment): State, + Json(shared_task_details): Json, +) -> Result>>, ApiError> { + let Ok(publisher) = deployment.share_publisher() else { + return Err(ShareError::MissingConfig("share publisher unavailable").into()); + }; + + let task = publisher.link_shared_task(shared_task_details).await?; + + if let Some(ref task) = task { + let props = serde_json::json!({ + "shared_task_id": task.shared_task_id, + "task_id": task.id, + "project_id": task.project_id, + }); + deployment + .track_if_analytics_allowed("link_shared_task_to_local", props) + .await; + } + + Ok(ResponseJson(ApiResponse::success(task))) +} diff --git a/crates/server/src/routes/tasks.rs b/crates/server/src/routes/tasks.rs index a12463e4..9ec95b9d 100644 --- a/crates/server/src/routes/tasks.rs +++ b/crates/server/src/routes/tasks.rs @@ -28,7 +28,7 @@ use services::services::{ }; use sqlx::Error as SqlxError; use ts_rs::TS; -use utils::response::ApiResponse; +use utils::{api::oauth::LoginStatus, response::ApiResponse}; use uuid::Uuid; use crate::{DeploymentImpl, error::ApiError, middleware::load_task_middleware}; @@ -220,6 +220,8 @@ pub async fn update_task( Json(payload): Json, ) -> Result>, ApiError> { + ensure_shared_task_auth(&existing_task, &deployment).await?; + // Use existing values if not provided in update let title = payload.title.unwrap_or(existing_task.title); let description = match payload.description { @@ -259,10 +261,27 @@ pub async fn update_task( Ok(ResponseJson(ApiResponse::success(task))) } +async fn ensure_shared_task_auth( + existing_task: &Task, + deployment: &local_deployment::LocalDeployment, +) -> Result<(), ApiError> { + if existing_task.shared_task_id.is_some() { + match deployment.get_login_status().await { + LoginStatus::LoggedIn { .. 
} => return Ok(()), + LoginStatus::LoggedOut => { + return Err(ShareError::MissingAuth.into()); + } + } + } + Ok(()) +} + pub async fn delete_task( Extension(task): Extension, State(deployment): State, ) -> Result<(StatusCode, ResponseJson>), ApiError> { + ensure_shared_task_auth(&task, &deployment).await?; + // Validate no running execution processes if deployment .container() diff --git a/crates/services/src/services/events.rs b/crates/services/src/services/events.rs index c8ed9fa8..f643876c 100644 --- a/crates/services/src/services/events.rs +++ b/crates/services/src/services/events.rs @@ -3,8 +3,8 @@ use std::{str::FromStr, sync::Arc}; use db::{ DBService, models::{ - execution_process::ExecutionProcess, scratch::Scratch, - shared_task::SharedTask as SharedDbTask, task::Task, task_attempt::TaskAttempt, + execution_process::ExecutionProcess, scratch::Scratch, task::Task, + task_attempt::TaskAttempt, }, }; use serde_json::json; @@ -20,9 +20,7 @@ mod streams; #[path = "events/types.rs"] pub mod types; -pub use patches::{ - execution_process_patch, scratch_patch, shared_task_patch, task_attempt_patch, task_patch, -}; +pub use patches::{execution_process_patch, scratch_patch, task_attempt_patch, task_patch}; pub use types::{EventError, EventPatch, EventPatchInner, HookTables, RecordTypes}; #[derive(Clone)] @@ -125,14 +123,6 @@ impl EventService { msg_store_for_preupdate.push_patch(patch); } } - "shared_tasks" => { - if let Ok(value) = preupdate.get_old_column_value(0) - && let Ok(task_id) = >::decode(value) - { - let patch = shared_task_patch::remove(task_id); - msg_store_for_preupdate.push_patch(patch); - } - } "scratch" => { // Composite key: need both id (column 0) and scratch_type (column 1) if let Ok(id_val) = preupdate.get_old_column_value(0) @@ -163,27 +153,10 @@ impl EventService { (HookTables::Tasks, SqliteOperation::Delete) | (HookTables::TaskAttempts, SqliteOperation::Delete) | (HookTables::ExecutionProcesses, SqliteOperation::Delete) - | (HookTables::SharedTasks, SqliteOperation::Delete) | (HookTables::Scratch, SqliteOperation::Delete) => { // Deletions handled in preupdate hook for reliable data capture return; } - (HookTables::SharedTasks, _) => { - match SharedDbTask::find_by_rowid(&db.pool, rowid).await { - Ok(Some(task)) => RecordTypes::SharedTask(task), - Ok(None) => RecordTypes::DeletedSharedTask { - rowid, - task_id: None, - }, - Err(e) => { - tracing::error!( - "Failed to fetch shared_task: {:?}", - e - ); - return; - } - } - } (HookTables::Tasks, _) => { match Task::find_by_rowid(&db.pool, rowid).await { Ok(Some(task)) => RecordTypes::Task(task), @@ -280,15 +253,6 @@ impl EventService { return; } } - RecordTypes::SharedTask(task) => { - let patch = match hook.operation { - SqliteOperation::Insert => shared_task_patch::add(task), - SqliteOperation::Update => shared_task_patch::replace(task), - _ => shared_task_patch::replace(task), - }; - msg_store_for_hook.push_patch(patch); - return; - } RecordTypes::DeletedTask { task_id: Some(task_id), .. @@ -297,14 +261,6 @@ impl EventService { msg_store_for_hook.push_patch(patch); return; } - RecordTypes::DeletedSharedTask { - task_id: Some(task_id), - .. 
- } => { - let patch = shared_task_patch::remove(*task_id); - msg_store_for_hook.push_patch(patch); - return; - } RecordTypes::Scratch(scratch) => { let patch = match hook.operation { SqliteOperation::Insert => scratch_patch::add(scratch), diff --git a/crates/services/src/services/events/patches.rs b/crates/services/src/services/events/patches.rs index daa98f9e..4db4db61 100644 --- a/crates/services/src/services/events/patches.rs +++ b/crates/services/src/services/events/patches.rs @@ -1,6 +1,6 @@ use db::models::{ - execution_process::ExecutionProcess, scratch::Scratch, shared_task::SharedTask as DbSharedTask, - task::TaskWithAttemptStatus, task_attempt::TaskAttempt, + execution_process::ExecutionProcess, scratch::Scratch, task::TaskWithAttemptStatus, + task_attempt::TaskAttempt, }; use json_patch::{AddOperation, Patch, PatchOperation, RemoveOperation, ReplaceOperation}; use uuid::Uuid; @@ -48,44 +48,6 @@ pub mod task_patch { } } -/// Helper functions for creating shared task-specific patches -pub mod shared_task_patch { - use super::*; - - fn shared_task_path(task_id: Uuid) -> String { - format!( - "/shared_tasks/{}", - escape_pointer_segment(&task_id.to_string()) - ) - } - - pub fn add(task: &DbSharedTask) -> Patch { - Patch(vec![PatchOperation::Add(AddOperation { - path: shared_task_path(task.id) - .try_into() - .expect("Shared task path should be valid"), - value: serde_json::to_value(task).expect("Shared task serialization should not fail"), - })]) - } - - pub fn replace(task: &DbSharedTask) -> Patch { - Patch(vec![PatchOperation::Replace(ReplaceOperation { - path: shared_task_path(task.id) - .try_into() - .expect("Shared task path should be valid"), - value: serde_json::to_value(task).expect("Shared task serialization should not fail"), - })]) - } - - pub fn remove(task_id: Uuid) -> Patch { - Patch(vec![PatchOperation::Remove(RemoveOperation { - path: shared_task_path(task_id) - .try_into() - .expect("Shared task path should be valid"), - })]) - } -} - /// Helper functions for creating execution process-specific patches pub mod execution_process_patch { use super::*; diff --git a/crates/services/src/services/events/streams.rs b/crates/services/src/services/events/streams.rs index f367e2ac..09ce64bd 100644 --- a/crates/services/src/services/events/streams.rs +++ b/crates/services/src/services/events/streams.rs @@ -1,8 +1,6 @@ use db::models::{ execution_process::ExecutionProcess, - project::Project, scratch::Scratch, - shared_task::SharedTask, task::{Task, TaskWithAttemptStatus}, }; use futures::StreamExt; @@ -33,37 +31,17 @@ impl EventService { .map(|task| (task.id.to_string(), serde_json::to_value(task).unwrap())) .collect(); - let remote_project_id = Project::find_by_id(&self.db.pool, project_id) - .await? - .and_then(|project| project.remote_project_id); - - let shared_tasks = if let Some(remote_project_id) = remote_project_id { - SharedTask::list_by_remote_project_id(&self.db.pool, remote_project_id).await? 
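For orientation on the patch helpers above: with `shared_task_patch` gone, the surviving modules (`task_patch`, `execution_process_patch`, and friends) all follow one shape, a single JSON Patch operation targeting `/{collection}/{id}`. A condensed sketch of that pattern (`add_at` is an illustrative name, not a helper introduced by this diff):

```rust
use json_patch::{AddOperation, Patch, PatchOperation};

// One JSON Patch "add" at the given pointer; replace/remove are analogous.
// Mirrors the .try_into().expect(...) pointer construction used above.
fn add_at(path: String, value: serde_json::Value) -> Patch {
    Patch(vec![PatchOperation::Add(AddOperation {
        path: path.try_into().expect("path should be a valid JSON pointer"),
        value,
    })])
}
```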
- } else { - Vec::new() - }; - let shared_tasks_map: serde_json::Map = shared_tasks - .into_iter() - .map(|task| (task.id.to_string(), serde_json::to_value(task).unwrap())) - .collect(); - let initial_patch = json!([ { "op": "replace", "path": "/tasks", "value": tasks_map - }, - { - "op": "replace", - "path": "/shared_tasks", - "value": shared_tasks_map } ]); let initial_msg = LogMsg::JsonPatch(serde_json::from_value(initial_patch).unwrap()); // Clone necessary data for the async filter let db_pool = self.db.pool.clone(); - let remote_project_id_filter = remote_project_id; // Get filtered event stream let filtered_stream = @@ -74,44 +52,6 @@ impl EventService { Ok(LogMsg::JsonPatch(patch)) => { // Filter events based on project_id if let Some(patch_op) = patch.0.first() { - if patch_op.path().starts_with("/shared_tasks/") { - match patch_op { - json_patch::PatchOperation::Add(op) => { - if let Ok(shared_task) = - serde_json::from_value::( - op.value.clone(), - ) - && remote_project_id_filter - .map(|expected| { - shared_task.remote_project_id == expected - }) - .unwrap_or(false) - { - return Some(Ok(LogMsg::JsonPatch(patch))); - } - } - json_patch::PatchOperation::Replace(op) => { - if let Ok(shared_task) = - serde_json::from_value::( - op.value.clone(), - ) - && remote_project_id_filter - .map(|expected| { - shared_task.remote_project_id == expected - }) - .unwrap_or(false) - { - return Some(Ok(LogMsg::JsonPatch(patch))); - } - } - json_patch::PatchOperation::Remove(_) => { - // Forward removals; clients will ignore missing tasks - return Some(Ok(LogMsg::JsonPatch(patch))); - } - _ => {} - } - return None; - } // Check if this is a direct task patch (new format) if patch_op.path().starts_with("/tasks/") { match patch_op { @@ -165,19 +105,6 @@ impl EventService { return Some(Ok(LogMsg::JsonPatch(patch))); } } - RecordTypes::SharedTask(shared_task) => { - if remote_project_id_filter - .map(|expected| { - shared_task.remote_project_id == expected - }) - .unwrap_or(false) - { - return Some(Ok(LogMsg::JsonPatch(patch))); - } - } - RecordTypes::DeletedSharedTask { .. 
} => { - return Some(Ok(LogMsg::JsonPatch(patch))); - } RecordTypes::TaskAttempt(attempt) => { // Check if this task_attempt belongs to a task in our project if let Ok(Some(task)) = diff --git a/crates/services/src/services/events/types.rs b/crates/services/src/services/events/types.rs index 4c7a262a..c6636e4c 100644 --- a/crates/services/src/services/events/types.rs +++ b/crates/services/src/services/events/types.rs @@ -1,7 +1,6 @@ use anyhow::Error as AnyhowError; use db::models::{ - execution_process::ExecutionProcess, scratch::Scratch, shared_task::SharedTask, task::Task, - task_attempt::TaskAttempt, + execution_process::ExecutionProcess, scratch::Scratch, task::Task, task_attempt::TaskAttempt, }; use serde::{Deserialize, Serialize}; use sqlx::Error as SqlxError; @@ -28,8 +27,6 @@ pub enum HookTables { TaskAttempts, #[strum(to_string = "execution_processes")] ExecutionProcesses, - #[strum(to_string = "shared_tasks")] - SharedTasks, #[strum(to_string = "scratch")] Scratch, } @@ -40,7 +37,6 @@ pub enum RecordTypes { Task(Task), TaskAttempt(TaskAttempt), ExecutionProcess(ExecutionProcess), - SharedTask(SharedTask), Scratch(Scratch), DeletedTask { rowid: i64, @@ -56,10 +52,6 @@ pub enum RecordTypes { task_attempt_id: Option, process_id: Option, }, - DeletedSharedTask { - rowid: i64, - task_id: Option, - }, DeletedScratch { rowid: i64, scratch_id: Option, diff --git a/crates/services/src/services/remote_client.rs b/crates/services/src/services/remote_client.rs index 691c76b6..577106d3 100644 --- a/crates/services/src/services/remote_client.rs +++ b/crates/services/src/services/remote_client.rs @@ -4,12 +4,9 @@ use std::time::Duration; use backon::{ExponentialBuilder, Retryable}; use chrono::Duration as ChronoDuration; -use remote::{ - activity::ActivityResponse, - routes::tasks::{ - AssignSharedTaskRequest, BulkSharedTasksResponse, CreateSharedTaskRequest, - DeleteSharedTaskRequest, SharedTaskResponse, UpdateSharedTaskRequest, - }, +use remote::routes::tasks::{ + AssignSharedTaskRequest, CheckTasksRequest, CreateSharedTaskRequest, SharedTaskResponse, + UpdateSharedTaskRequest, }; use reqwest::{Client, StatusCode}; use serde::{Deserialize, Serialize}; @@ -580,14 +577,13 @@ impl RemoteClient { pub async fn delete_shared_task( &self, task_id: Uuid, - request: &DeleteSharedTaskRequest, ) -> Result { let res = self .send( reqwest::Method::DELETE, &format!("/v1/tasks/{task_id}"), true, - Some(request), + None::<&()>, ) .await?; res.json::() @@ -595,27 +591,10 @@ impl RemoteClient { .map_err(|e| RemoteClientError::Serde(e.to_string())) } - /// Fetches activity events for a project. - pub async fn fetch_activity( - &self, - project_id: Uuid, - after: Option, - limit: u32, - ) -> Result { - let mut path = format!("/v1/activity?project_id={project_id}&limit={limit}"); - if let Some(seq) = after { - path.push_str(&format!("&after={seq}")); - } - self.get_authed(&path).await - } - - /// Fetches bulk snapshot of shared tasks for a project. - pub async fn fetch_bulk_snapshot( - &self, - project_id: Uuid, - ) -> Result { - self.get_authed(&format!("/v1/tasks/bulk?project_id={project_id}")) - .await + /// Checks if shared tasks exist. 
+ pub async fn check_tasks(&self, task_ids: Vec) -> Result, RemoteClientError> { + let request = CheckTasksRequest { task_ids }; + self.post_authed("/v1/tasks/check", Some(&request)).await } } diff --git a/crates/services/src/services/share.rs b/crates/services/src/services/share.rs index 37f580ea..92172faf 100644 --- a/crates/services/src/services/share.rs +++ b/crates/services/src/services/share.rs @@ -1,51 +1,15 @@ mod config; -mod processor; mod publisher; mod status; -use std::{ - collections::{HashMap, HashSet}, - io, - sync::{Arc, Mutex as StdMutex}, - time::Duration, -}; - -use async_trait::async_trait; -use axum::http::{HeaderName, HeaderValue, header::AUTHORIZATION}; pub use config::ShareConfig; -use db::{ - DBService, - models::{ - shared_task::{SharedActivityCursor, SharedTask, SharedTaskInput}, - task::{SyncTask, Task}, - }, -}; -use processor::ActivityProcessor; -pub use publisher::SharePublisher; -use remote::{ - ClientMessage, ServerMessage, - db::{tasks::SharedTask as RemoteSharedTask, users::UserData as RemoteUserData}, -}; -use sqlx::{Executor, Sqlite, SqlitePool}; +pub use publisher::{SharePublisher, SharedTaskDetails}; use thiserror::Error; -use tokio::{ - sync::{mpsc, oneshot}, - task::JoinHandle, - time::{MissedTickBehavior, interval, sleep}, -}; -use tokio_tungstenite::tungstenite::Message as WsMessage; -use url::Url; -use utils::ws::{ - WS_AUTH_REFRESH_INTERVAL, WsClient, WsConfig, WsError, WsHandler, WsResult, run_ws_client, -}; use uuid::Uuid; use crate::{ RemoteClientError, - services::{ - auth::AuthContext, git::GitServiceError, github::GitHubServiceError, - remote_client::RemoteClient, - }, + services::{git::GitServiceError, github::GitHubServiceError}, }; #[derive(Debug, Error)] @@ -58,8 +22,6 @@ pub enum ShareError { Serialization(#[from] serde_json::Error), #[error(transparent)] Url(#[from] url::ParseError), - #[error(transparent)] - WebSocket(#[from] WsError), #[error("share configuration missing: {0}")] MissingConfig(&'static str), #[error("task {0} not found")] @@ -87,620 +49,3 @@ pub enum ShareError { #[error(transparent)] RemoteClientError(#[from] RemoteClientError), } - -const WS_BACKOFF_BASE_DELAY: Duration = Duration::from_secs(1); -const WS_BACKOFF_MAX_DELAY: Duration = Duration::from_secs(30); - -struct Backoff { - current: Duration, -} - -impl Backoff { - fn new() -> Self { - Self { - current: WS_BACKOFF_BASE_DELAY, - } - } - - fn reset(&mut self) { - self.current = WS_BACKOFF_BASE_DELAY; - } - - async fn wait(&mut self) { - let wait = self.current; - sleep(wait).await; - let doubled = wait.checked_mul(2).unwrap_or(WS_BACKOFF_MAX_DELAY); - self.current = std::cmp::min(doubled, WS_BACKOFF_MAX_DELAY); - } -} - -struct ProjectWatcher { - shutdown: oneshot::Sender<()>, - join: JoinHandle<()>, -} - -struct ProjectWatcherEvent { - project_id: Uuid, - result: Result<(), ShareError>, -} - -pub struct RemoteSync { - db: DBService, - processor: ActivityProcessor, - config: ShareConfig, - auth_ctx: AuthContext, -} - -impl RemoteSync { - pub fn spawn(db: DBService, config: ShareConfig, auth_ctx: AuthContext) -> RemoteSyncHandle { - tracing::info!(api = %config.api_base, "starting shared task synchronizer"); - let remote_client = RemoteClient::new(config.api_base.as_str(), auth_ctx.clone()) - .expect("failed to create remote client"); - let processor = - ActivityProcessor::new(db.clone(), config.clone(), remote_client, auth_ctx.clone()); - let sync = Self { - db, - processor, - config, - auth_ctx, - }; - let (shutdown_tx, shutdown_rx) = oneshot::channel(); - 
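Before the large deletion above continues: the new `check_tasks` call is the small reconciliation primitive that replaces the bulk-snapshot/WebSocket sync being removed, and is presumably what `cleanup_shared_tasks` builds on. A hedged sketch, assuming the server's `check_tasks_existence` echoes back the subset of IDs that still exist:

```rust
use services::{RemoteClientError, services::remote_client::RemoteClient};
use uuid::Uuid;

// Report which locally linked shared-task IDs the remote no longer knows
// about, so the caller can unlink or clean them up.
async fn stale_shared_ids(
    client: &RemoteClient,
    local_ids: Vec<Uuid>,
) -> Result<Vec<Uuid>, RemoteClientError> {
    let existing = client.check_tasks(local_ids.clone()).await?;
    Ok(local_ids
        .into_iter()
        .filter(|id| !existing.contains(id))
        .collect())
}
```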
let join = tokio::spawn(async move { - if let Err(e) = sync.run(shutdown_rx).await { - tracing::error!(?e, "remote sync terminated unexpectedly"); - } - }); - - RemoteSyncHandle::new(shutdown_tx, join) - } - - pub async fn run(self, mut shutdown_rx: oneshot::Receiver<()>) -> Result<(), ShareError> { - let mut watchers: HashMap = HashMap::new(); - let (event_tx, mut event_rx) = mpsc::unbounded_channel(); - let mut refresh_interval = interval(Duration::from_secs(5)); - refresh_interval.set_missed_tick_behavior(MissedTickBehavior::Skip); - - self.reconcile_watchers(&mut watchers, &event_tx).await?; - - loop { - tokio::select! { - _ = &mut shutdown_rx => { - tracing::info!("remote sync shutdown requested"); - for (project_id, watcher) in watchers.drain() { - tracing::info!(%project_id, "stopping watcher due to shutdown"); - let _ = watcher.shutdown.send(()); - tokio::spawn(async move { - if let Err(err) = watcher.join.await { - tracing::debug!(?err, %project_id, "project watcher join failed during shutdown"); - } - }); - } - return Ok(()); - } - Some(event) = event_rx.recv() => { - match event.result { - Ok(()) => { - tracing::debug!(project_id = %event.project_id, "project watcher exited cleanly"); - } - Err(err) => { - tracing::warn!(project_id = %event.project_id, ?err, "project watcher terminated with error"); - } - } - watchers.remove(&event.project_id); - } - _ = refresh_interval.tick() => { - self.reconcile_watchers(&mut watchers, &event_tx).await?; - } - } - } - } - - async fn reconcile_watchers( - &self, - watchers: &mut HashMap, - events_tx: &mpsc::UnboundedSender, - ) -> Result<(), ShareError> { - let linked_projects = self.linked_remote_projects().await?; - let desired: HashSet = linked_projects.iter().copied().collect(); - - for project_id in linked_projects { - if let std::collections::hash_map::Entry::Vacant(e) = watchers.entry(project_id) { - tracing::info!(%project_id, "starting watcher for linked remote project"); - let watcher = self - .spawn_project_watcher(project_id, events_tx.clone()) - .await?; - e.insert(watcher); - } - } - - let to_remove: Vec = watchers - .keys() - .copied() - .filter(|id| !desired.contains(id)) - .collect(); - - for project_id in to_remove { - if let Some(watcher) = watchers.remove(&project_id) { - tracing::info!(%project_id, "remote project unlinked; shutting down watcher"); - let _ = watcher.shutdown.send(()); - tokio::spawn(async move { - if let Err(err) = watcher.join.await { - tracing::debug!(?err, %project_id, "project watcher join failed during teardown"); - } - }); - } - } - - Ok(()) - } - - async fn linked_remote_projects(&self) -> Result, ShareError> { - let rows = sqlx::query_scalar::<_, Uuid>( - r#" - SELECT remote_project_id - FROM projects - WHERE remote_project_id IS NOT NULL - "#, - ) - .fetch_all(&self.db.pool) - .await?; - - Ok(rows) - } - - async fn spawn_project_watcher( - &self, - project_id: Uuid, - events_tx: mpsc::UnboundedSender, - ) -> Result { - let processor = self.processor.clone(); - let config = self.config.clone(); - let auth_ctx = self.auth_ctx.clone(); - let remote_client = processor.remote_client(); - let db = self.db.clone(); - let (shutdown_tx, shutdown_rx) = oneshot::channel(); - - let join = tokio::spawn(async move { - let result = project_watcher_task( - db, - processor, - config, - auth_ctx, - remote_client, - project_id, - shutdown_rx, - ) - .await; - - let _ = events_tx.send(ProjectWatcherEvent { project_id, result }); - }); - - Ok(ProjectWatcher { - shutdown: shutdown_tx, - join, - }) - } -} - -struct 
SharedWsHandler { - processor: ActivityProcessor, - close_tx: Option>, - remote_project_id: Uuid, -} - -#[async_trait] -impl WsHandler for SharedWsHandler { - async fn handle_message(&mut self, msg: WsMessage) -> Result<(), WsError> { - if let WsMessage::Text(txt) = msg { - match serde_json::from_str::(&txt) { - Ok(ServerMessage::Activity(event)) => { - let seq = event.seq; - if event.project_id != self.remote_project_id { - tracing::warn!( - expected = %self.remote_project_id, - received = %event.project_id, - "received activity for unexpected project via websocket" - ); - return Ok(()); - } - self.processor - .process_event(event) - .await - .map_err(|err| WsError::Handler(Box::new(err)))?; - - tracing::debug!(seq, "processed remote activity"); - } - Ok(ServerMessage::Error { message }) => { - tracing::warn!(?message, "received WS error message"); - // Remote sends this error when client has lagged too far behind. - // Return Err will trigger the `on_close` handler. - return Err(WsError::Handler(Box::new(io::Error::other(format!( - "remote websocket error: {message}" - ))))); - } - Err(err) => { - tracing::error!(raw = %txt, ?err, "unable to parse WS message"); - } - } - } - Ok(()) - } - - async fn on_close(&mut self) -> Result<(), WsError> { - tracing::info!("WebSocket closed, handler cleanup if needed"); - if let Some(tx) = self.close_tx.take() { - let _ = tx.send(()); - } - Ok(()) - } -} - -async fn spawn_shared_remote( - processor: ActivityProcessor, - remote_client: RemoteClient, - url: Url, - close_tx: oneshot::Sender<()>, - remote_project_id: Uuid, -) -> Result { - let remote_client_clone = remote_client.clone(); - let ws_config = WsConfig { - url, - ping_interval: Some(std::time::Duration::from_secs(30)), - header_factory: Some(Arc::new(move || { - let remote_client_clone = remote_client_clone.clone(); - Box::pin(async move { - match remote_client_clone.access_token().await { - Ok(token) => build_ws_headers(&token), - Err(error) => { - tracing::warn!( - ?error, - "failed to obtain access token for websocket connection" - ); - Err(WsError::MissingAuth) - } - } - }) - })), - }; - - let handler = SharedWsHandler { - processor, - close_tx: Some(close_tx), - remote_project_id, - }; - let client = run_ws_client(handler, ws_config) - .await - .map_err(ShareError::from)?; - spawn_ws_auth_refresh_task(client.clone(), remote_client); - - Ok(client) -} - -async fn project_watcher_task( - db: DBService, - processor: ActivityProcessor, - config: ShareConfig, - auth_ctx: AuthContext, - remote_client: RemoteClient, - remote_project_id: Uuid, - mut shutdown_rx: oneshot::Receiver<()>, -) -> Result<(), ShareError> { - let mut backoff = Backoff::new(); - - loop { - if auth_ctx.cached_profile().await.is_none() { - tracing::debug!(%remote_project_id, "waiting for authentication before syncing project"); - tokio::select! { - _ = &mut shutdown_rx => return Ok(()), - _ = backoff.wait() => {} - } - continue; - } - - let mut last_seq = SharedActivityCursor::get(&db.pool, remote_project_id) - .await? - .map(|cursor| cursor.last_seq); - - match processor - .catch_up_project(remote_project_id, last_seq) - .await - { - Ok(seq) => { - last_seq = seq; - } - Err(ShareError::MissingAuth) => { - tracing::debug!(%remote_project_id, "missing auth during catch-up; retrying after backoff"); - tokio::select! 
-        let ws_url = match config.websocket_endpoint(remote_project_id, last_seq) {
-            Ok(url) => url,
-            Err(err) => return Err(ShareError::Url(err)),
-        };
-
-        let (close_tx, close_rx) = oneshot::channel();
-        let ws_connection = match spawn_shared_remote(
-            processor.clone(),
-            remote_client.clone(),
-            ws_url,
-            close_tx,
-            remote_project_id,
-        )
-        .await
-        {
-            Ok(conn) => {
-                backoff.reset();
-                conn
-            }
-            Err(ShareError::MissingAuth) => {
-                tracing::debug!(%remote_project_id, "missing auth during websocket connect; retrying");
-                tokio::select! {
-                    _ = &mut shutdown_rx => return Ok(()),
-                    _ = backoff.wait() => {}
-                }
-                continue;
-            }
-            Err(err) => {
-                tracing::error!(%remote_project_id, ?err, "failed to establish websocket; retrying");
-                tokio::select! {
-                    _ = &mut shutdown_rx => return Ok(()),
-                    _ = backoff.wait() => {}
-                }
-                continue;
-            }
-        };
-
-        tokio::select! {
-            _ = &mut shutdown_rx => {
-                tracing::info!(%remote_project_id, "shutdown signal received for project watcher");
-                if let Err(err) = ws_connection.close() {
-                    tracing::debug!(?err, %remote_project_id, "failed to close websocket during shutdown");
-                }
-                return Ok(());
-            }
-            res = close_rx => {
-                match res {
-                    Ok(()) => {
-                        tracing::info!(%remote_project_id, "project websocket closed; scheduling reconnect");
-                    }
-                    Err(_) => {
-                        tracing::warn!(%remote_project_id, "project websocket close signal dropped");
-                    }
-                }
-                if let Err(err) = ws_connection.close() {
-                    tracing::debug!(?err, %remote_project_id, "project websocket already closed when reconnecting");
-                }
-                tokio::select! {
-                    _ = &mut shutdown_rx => {
-                        tracing::info!(%remote_project_id, "shutdown received during reconnect wait");
-                        return Ok(());
-                    }
-                    _ = backoff.wait() => {}
-                }
-            }
-        }
-    }
-}
-
-fn build_ws_headers(access_token: &str) -> WsResult<Vec<(HeaderName, HeaderValue)>> {
-    let mut headers = Vec::new();
-    let value = format!("Bearer {access_token}");
-    let header = HeaderValue::from_str(&value).map_err(|err| WsError::Header(err.to_string()))?;
-    headers.push((AUTHORIZATION, header));
-    Ok(headers)
-}
-
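The watcher task above reconnects in a loop, waiting on an exponential backoff that is reset after a successful connect and racing every wait against the shutdown signal. A rough TypeScript sketch of that shape, sketched here only for illustration (it assumes a connect() that resolves once the socket closes):

// Minimal reconnect-with-backoff loop mirroring the deleted Rust watcher task.
async function watchWithBackoff(
  connect: () => Promise<void>, // resolves when the connection closes
  signal: AbortSignal // stands in for the shutdown channel
): Promise<void> {
  let delayMs = 1_000;
  while (!signal.aborted) {
    try {
      await connect();
      delayMs = 1_000; // connection succeeded at some point: reset the backoff
    } catch {
      // connect failed; fall through to the wait below and retry
    }
    await new Promise((resolve) => setTimeout(resolve, delayMs));
    delayMs = Math.min(delayMs * 2, 60_000); // cap the exponential growth
  }
}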
-fn spawn_ws_auth_refresh_task(client: WsClient, remote_client: RemoteClient) {
-    tokio::spawn(async move {
-        let mut close_rx = client.subscribe_close();
-        loop {
-            match remote_client.access_token().await {
-                Ok(token) => {
-                    if let Err(err) = send_ws_auth_token(&client, token).await {
-                        tracing::warn!(
-                            ?err,
-                            "failed to send websocket auth token; stopping auth refresh"
-                        );
-                        break;
-                    }
-                }
-                Err(err) => {
-                    tracing::warn!(
-                        ?err,
-                        "failed to obtain access token for websocket auth refresh; stopping auth refresh"
-                    );
-                    break;
-                }
-            }
-
-            tokio::select! {
-                _ = close_rx.changed() => break,
-                _ = sleep(WS_AUTH_REFRESH_INTERVAL) => {}
-            }
-        }
-    });
-}
-
-async fn send_ws_auth_token(client: &WsClient, token: String) -> Result<(), ShareError> {
-    let payload = serde_json::to_string(&ClientMessage::AuthToken { token })?;
-    client
-        .send(WsMessage::Text(payload.into()))
-        .map_err(ShareError::from)
-}
-
-#[derive(Clone)]
-pub struct RemoteSyncHandle {
-    inner: Arc<RemoteSyncHandleInner>,
-}
-
-struct RemoteSyncHandleInner {
-    shutdown: StdMutex<Option<oneshot::Sender<()>>>,
-    join: StdMutex<Option<JoinHandle<()>>>,
-}
-
-impl RemoteSyncHandle {
-    fn new(shutdown: oneshot::Sender<()>, join: JoinHandle<()>) -> Self {
-        Self {
-            inner: Arc::new(RemoteSyncHandleInner {
-                shutdown: StdMutex::new(Some(shutdown)),
-                join: StdMutex::new(Some(join)),
-            }),
-        }
-    }
-
-    pub fn request_shutdown(&self) {
-        if let Some(tx) = self.inner.shutdown.lock().unwrap().take() {
-            let _ = tx.send(());
-        }
-    }
-
-    pub async fn shutdown(&self) {
-        self.request_shutdown();
-        let join = {
-            let mut guard = self.inner.join.lock().unwrap();
-            guard.take()
-        };
-
-        if let Some(join) = join
-            && let Err(err) = join.await
-        {
-            tracing::warn!(?err, "remote sync task join failed");
-        }
-    }
-}
-
-impl Drop for RemoteSyncHandleInner {
-    fn drop(&mut self) {
-        if let Some(tx) = self.shutdown.lock().unwrap().take() {
-            let _ = tx.send(());
-        }
-        if let Some(join) = self.join.lock().unwrap().take() {
-            join.abort();
-        }
-    }
-}
-
-pub(super) fn convert_remote_task(
-    task: &RemoteSharedTask,
-    user: Option<&RemoteUserData>,
-    last_event_seq: Option<i64>,
-) -> SharedTaskInput {
-    SharedTaskInput {
-        id: task.id,
-        remote_project_id: task.project_id,
-        title: task.title.clone(),
-        description: task.description.clone(),
-        status: status::from_remote(&task.status),
-        assignee_user_id: task.assignee_user_id,
-        assignee_first_name: user.and_then(|u| u.first_name.clone()),
-        assignee_last_name: user.and_then(|u| u.last_name.clone()),
-        assignee_username: user.and_then(|u| u.username.clone()),
-        version: task.version,
-        last_event_seq,
-        created_at: task.created_at,
-        updated_at: task.updated_at,
-    }
-}
-
-pub(super) async fn sync_local_task_for_shared_task<'e, E>(
-    executor: E,
-    shared_task: &SharedTask,
-    current_user_id: Option<Uuid>,
-    creator_user_id: Option<Uuid>,
-    project_id: Option<Uuid>,
-) -> Result<(), ShareError>
-where
-    E: Executor<'e, Database = Sqlite>,
-{
-    let Some(project_id) = project_id else {
-        return Ok(());
-    };
-
-    let create_task_if_not_exists = {
-        let assignee_is_current_user = matches!(
-            (shared_task.assignee_user_id.as_ref(), current_user_id.as_ref()),
-            (Some(assignee), Some(current)) if assignee == current
-        );
-        let creator_is_current_user = matches!((creator_user_id.as_ref(), current_user_id.as_ref()), (Some(creator), Some(current)) if creator == current);
-
-        assignee_is_current_user
-            && !(creator_is_current_user && SHARED_TASK_LINKING_LOCK.lock().unwrap().is_locked())
-    };
-
-    Task::sync_from_shared_task(
-        executor,
-        SyncTask {
-            shared_task_id: shared_task.id,
-            project_id,
-            title: shared_task.title.clone(),
-            description: shared_task.description.clone(),
-            status: shared_task.status.clone(),
-        },
-        create_task_if_not_exists,
-    )
-    .await?;
-
-    Ok(())
-}
-
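The SHARED_TASK_LINKING_LOCK defined just below is a simple count-based guard: is_locked() reports whether any guard is currently alive, and dropping a guard decrements the count. A TypeScript sketch of the same idea, with an explicit release function standing in for Rust's Drop (illustrative only):

// Count-based guard mirroring SharedTaskLinkingLock / SharedTaskLinkingGuard below.
class CountingLock {
  private count = 0;

  isLocked(): boolean {
    return this.count > 0;
  }

  // Returns a release function; calling it more than once is a no-op.
  guard(): () => void {
    this.count += 1;
    let released = false;
    return () => {
      if (!released) {
        released = true;
        this.count -= 1; // mirror of SharedTaskLinkingGuard::drop
      }
    };
  }
}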
-pub async fn link_shared_tasks_to_project(
-    pool: &SqlitePool,
-    current_user_id: Option<Uuid>,
-    project_id: Uuid,
-    remote_project_id: Uuid,
-) -> Result<(), ShareError> {
-    let tasks = SharedTask::list_by_remote_project_id(pool, remote_project_id).await?;
-
-    if tasks.is_empty() {
-        return Ok(());
-    }
-
-    for task in tasks {
-        sync_local_task_for_shared_task(pool, &task, current_user_id, None, Some(project_id))
-            .await?;
-    }
-
-    Ok(())
-}
-
-// Prevent duplicate local tasks from being created during task sharing.
-// The activity event handler can create a duplicate local task when it receives a shared task assigned to the current user.
-lazy_static::lazy_static! {
-    pub(super) static ref SHARED_TASK_LINKING_LOCK: StdMutex<SharedTaskLinkingLock> =
-        StdMutex::new(SharedTaskLinkingLock::new());
-}
-
-#[derive(Debug)]
-pub(super) struct SharedTaskLinkingLock {
-    count: usize,
-}
-
-impl SharedTaskLinkingLock {
-    fn new() -> Self {
-        Self { count: 0 }
-    }
-
-    pub(super) fn is_locked(&self) -> bool {
-        self.count > 0
-    }
-
-    #[allow(dead_code)]
-    pub(super) fn guard(&mut self) -> SharedTaskLinkingGuard {
-        self.count += 1;
-        SharedTaskLinkingGuard
-    }
-}
-
-#[allow(dead_code)]
-pub(super) struct SharedTaskLinkingGuard;
-
-impl Drop for SharedTaskLinkingGuard {
-    fn drop(&mut self) {
-        SHARED_TASK_LINKING_LOCK.lock().unwrap().count -= 1;
-    }
-}
diff --git a/crates/services/src/services/share/config.rs b/crates/services/src/services/share/config.rs
index 1d38ba11..b532561e 100644
--- a/crates/services/src/services/share/config.rs
+++ b/crates/services/src/services/share/config.rs
@@ -1,15 +1,8 @@
 use url::Url;
-use utils::ws::{WS_BULK_SYNC_THRESHOLD, derive_ws_url};
-use uuid::Uuid;
-
-const DEFAULT_ACTIVITY_LIMIT: u32 = 200;
 
 #[derive(Clone)]
 pub struct ShareConfig {
     pub api_base: Url,
-    pub websocket_base: Url,
-    pub activity_page_limit: u32,
-    pub bulk_sync_threshold: u32,
 }
 
 impl ShareConfig {
@@ -18,37 +11,7 @@ impl ShareConfig {
             .ok()
             .or_else(|| option_env!("VK_SHARED_API_BASE").map(|s| s.to_string()))?;
         let api_base = Url::parse(raw_base.trim()).ok()?;
-        let websocket_base = derive_ws_url(api_base.clone()).ok()?;
-        Some(Self {
-            api_base,
-            websocket_base,
-            activity_page_limit: DEFAULT_ACTIVITY_LIMIT,
-            bulk_sync_threshold: WS_BULK_SYNC_THRESHOLD,
-        })
-    }
-
-    pub fn activity_endpoint(&self) -> Result<Url, url::ParseError> {
-        self.api_base.join("/v1/activity")
-    }
-
-    pub fn bulk_tasks_endpoint(&self) -> Result<Url, url::ParseError> {
-        self.api_base.join("/v1/tasks/bulk")
-    }
-
-    pub fn websocket_endpoint(
-        &self,
-        project_id: Uuid,
-        cursor: Option<i64>,
-    ) -> Result<Url, url::ParseError> {
-        let mut url = self.websocket_base.join("/v1/ws")?;
-        {
-            let mut qp = url.query_pairs_mut();
-            qp.append_pair("project_id", &project_id.to_string());
-            if let Some(c) = cursor {
-                qp.append_pair("cursor", &c.to_string());
-            }
-        }
-        Ok(url)
+        Some(Self { api_base })
     }
 }
diff --git a/crates/services/src/services/share/processor.rs b/crates/services/src/services/share/processor.rs
deleted file mode 100644
index 7da660ff..00000000
--- a/crates/services/src/services/share/processor.rs
+++ /dev/null
@@ -1,340 +0,0 @@
-use std::collections::HashSet;
-
-use db::{
-    DBService,
-    models::{
-        project::Project,
-        shared_task::{SharedActivityCursor, SharedTask, SharedTaskInput},
-        task::Task,
-    },
-};
-use remote::{
-    activity::ActivityEvent, db::tasks::SharedTaskActivityPayload,
-    routes::tasks::BulkSharedTasksResponse,
-};
-use sqlx::{Sqlite, Transaction};
-use uuid::Uuid;
-
-use super::{ShareConfig, ShareError, convert_remote_task, sync_local_task_for_shared_task};
-use crate::services::{auth::AuthContext, remote_client::RemoteClient};
-
-struct PreparedBulkTask {
-    input: SharedTaskInput,
-    creator_user_id: Option<Uuid>,
-    project_id: Option<Uuid>,
-}
-
-/// Processor for handling activity events and synchronizing shared tasks.
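The processor whose deletion begins here drove sync from a per-project cursor: page through activity events after the cursor, and fall back to a full bulk snapshot when the backlog exceeds the bulk-sync threshold. An illustrative TypeScript sketch of that control flow (all names hypothetical):

// Catch-up with bulk-sync fallback, mirroring the deleted catch_up_project.
interface ActivityPage {
  seqs: number[]; // event sequence numbers, ascending
}

async function catchUp(
  fetchPage: (after: number | null) => Promise<ActivityPage>,
  bulkSync: () => Promise<number | null>, // full snapshot; returns latest seq
  threshold: number,
  cursor: number | null
): Promise<number | null> {
  if (cursor === null) cursor = await bulkSync(); // no cursor yet: start from a snapshot
  for (;;) {
    const page = await fetchPage(cursor);
    if (page.seqs.length === 0) return cursor; // caught up
    const newest = page.seqs[page.seqs.length - 1];
    if (cursor !== null && newest - cursor > threshold) {
      cursor = await bulkSync(); // too far behind: replace state, don't replay
      continue;
    }
    cursor = newest; // in the real code each event is applied one-by-one here
  }
}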
-#[derive(Clone)] -pub struct ActivityProcessor { - db: DBService, - config: ShareConfig, - remote_client: RemoteClient, - auth_ctx: AuthContext, -} - -impl ActivityProcessor { - pub fn new( - db: DBService, - config: ShareConfig, - remote_client: RemoteClient, - auth_ctx: AuthContext, - ) -> Self { - Self { - db, - config, - remote_client, - auth_ctx, - } - } - - pub fn remote_client(&self) -> RemoteClient { - self.remote_client.clone() - } - - pub async fn process_event(&self, event: ActivityEvent) -> Result<(), ShareError> { - let mut tx = self.db.pool.begin().await?; - match event.event_type.as_str() { - "task.deleted" => self.process_deleted_task_event(&mut tx, &event).await?, - _ => self.process_upsert_event(&mut tx, &event).await?, - } - - SharedActivityCursor::upsert(tx.as_mut(), event.project_id, event.seq).await?; - tx.commit().await?; - Ok(()) - } - - /// Fetch and process activity events until caught up, falling back to bulk syncs when needed. - pub async fn catch_up_project( - &self, - remote_project_id: Uuid, - mut last_seq: Option, - ) -> Result, ShareError> { - if last_seq.is_none() { - last_seq = self.bulk_sync(remote_project_id).await?; - } - - loop { - let events = self.fetch_activity(remote_project_id, last_seq).await?; - if events.is_empty() { - break; - } - - // Perform a bulk sync if we've fallen too far behind - if let Some(prev_seq) = last_seq - && let Some(newest) = events.last() - && newest.seq.saturating_sub(prev_seq) > self.config.bulk_sync_threshold as i64 - { - last_seq = self.bulk_sync(remote_project_id).await?; - continue; - } - - let page_len = events.len(); - for ev in events { - if ev.project_id != remote_project_id { - tracing::warn!( - expected = %remote_project_id, - received = %ev.project_id, - "received activity for unexpected project; ignoring" - ); - continue; - } - self.process_event(ev.clone()).await?; - last_seq = Some(ev.seq); - } - - if page_len < (self.config.activity_page_limit as usize) { - break; - } - } - - Ok(last_seq) - } - - /// Fetch a page of activity events from the remote service. - async fn fetch_activity( - &self, - remote_project_id: Uuid, - after: Option, - ) -> Result, ShareError> { - let resp = self - .remote_client - .fetch_activity(remote_project_id, after, self.config.activity_page_limit) - .await?; - Ok(resp.data) - } - - async fn resolve_project( - &self, - task_id: Uuid, - remote_project_id: Uuid, - ) -> Result, ShareError> { - if let Some(existing) = SharedTask::find_by_id(&self.db.pool, task_id).await? - && let Some(project) = - Project::find_by_remote_project_id(&self.db.pool, existing.remote_project_id) - .await? - { - return Ok(Some(project)); - } - - if let Some(project) = - Project::find_by_remote_project_id(&self.db.pool, remote_project_id).await? 
- { - return Ok(Some(project)); - } - - Ok(None) - } - - async fn process_upsert_event( - &self, - tx: &mut Transaction<'_, Sqlite>, - event: &ActivityEvent, - ) -> Result<(), ShareError> { - let Some(payload) = &event.payload else { - tracing::warn!(event_id = %event.event_id, "received activity event with empty payload"); - return Ok(()); - }; - - match serde_json::from_value::(payload.clone()) { - Ok(SharedTaskActivityPayload { task, user }) => { - let project = self.resolve_project(task.id, event.project_id).await?; - if project.is_none() { - tracing::debug!( - task_id = %task.id, - remote_project_id = %task.project_id, - "stored shared task without local project; awaiting link" - ); - } - - let project_id = project.as_ref().map(|p| p.id); - let input = convert_remote_task(&task, user.as_ref(), Some(event.seq)); - let shared_task = SharedTask::upsert(tx.as_mut(), input).await?; - - let current_profile = self.auth_ctx.cached_profile().await; - let current_user_id = current_profile.as_ref().map(|p| p.user_id); - sync_local_task_for_shared_task( - tx.as_mut(), - &shared_task, - current_user_id, - task.creator_user_id, - project_id, - ) - .await?; - } - Err(error) => { - tracing::warn!( - ?error, - event_id = %event.event_id, - "unrecognized shared task payload; skipping" - ); - } - } - - Ok(()) - } - - async fn process_deleted_task_event( - &self, - tx: &mut Transaction<'_, Sqlite>, - event: &ActivityEvent, - ) -> Result<(), ShareError> { - let Some(payload) = &event.payload else { - tracing::warn!( - event_id = %event.event_id, - "received delete event without payload; skipping" - ); - return Ok(()); - }; - - let SharedTaskActivityPayload { task, .. } = - match serde_json::from_value::(payload.clone()) { - Ok(payload) => payload, - Err(error) => { - tracing::warn!( - ?error, - event_id = %event.event_id, - "failed to parse deleted task payload; skipping" - ); - return Ok(()); - } - }; - - if let Some(local_task) = Task::find_by_shared_task_id(tx.as_mut(), task.id).await? { - Task::set_shared_task_id(tx.as_mut(), local_task.id, None).await?; - } - - SharedTask::remove(tx.as_mut(), task.id).await?; - Ok(()) - } - - async fn bulk_sync(&self, remote_project_id: Uuid) -> Result, ShareError> { - let bulk_resp = self.fetch_bulk_snapshot(remote_project_id).await?; - let latest_seq = bulk_resp.latest_seq; - - let mut keep_ids = HashSet::new(); - let mut replacements = Vec::new(); - - for payload in bulk_resp.tasks { - let project = self - .resolve_project(payload.task.id, remote_project_id) - .await?; - - if project.is_none() { - tracing::debug!( - task_id = %payload.task.id, - remote_project_id = %payload.task.project_id, - "storing shared task during bulk sync without local project" - ); - } - - let project_id = project.as_ref().map(|p| p.id); - keep_ids.insert(payload.task.id); - let input = convert_remote_task(&payload.task, payload.user.as_ref(), latest_seq); - replacements.push(PreparedBulkTask { - input, - creator_user_id: payload.task.creator_user_id, - project_id, - }); - } - - let mut stale: HashSet = - SharedTask::list_by_remote_project_id(&self.db.pool, remote_project_id) - .await? 
- .into_iter() - .filter_map(|task| { - if keep_ids.contains(&task.id) { - None - } else { - Some(task.id) - } - }) - .collect(); - - for deleted in bulk_resp.deleted_task_ids { - if !keep_ids.contains(&deleted) { - stale.insert(deleted); - } - } - - let stale_vec: Vec = stale.into_iter().collect(); - let current_profile = self.auth_ctx.cached_profile().await; - let current_user_id = current_profile.as_ref().map(|p| p.user_id); - - let mut tx = self.db.pool.begin().await?; - self.remove_stale_tasks(&mut tx, &stale_vec).await?; - - for PreparedBulkTask { - input, - creator_user_id, - project_id, - } in replacements - { - let shared_task = SharedTask::upsert(tx.as_mut(), input).await?; - sync_local_task_for_shared_task( - tx.as_mut(), - &shared_task, - current_user_id, - creator_user_id, - project_id, - ) - .await?; - } - - if let Some(seq) = latest_seq { - SharedActivityCursor::upsert(tx.as_mut(), remote_project_id, seq).await?; - } - - tx.commit().await?; - Ok(latest_seq) - } - - async fn remove_stale_tasks( - &self, - tx: &mut Transaction<'_, Sqlite>, - ids: &[Uuid], - ) -> Result<(), ShareError> { - if ids.is_empty() { - return Ok(()); - } - - for id in ids { - if let Some(local_task) = Task::find_by_shared_task_id(tx.as_mut(), *id).await? { - Task::set_shared_task_id(tx.as_mut(), local_task.id, None).await?; - } - } - - SharedTask::remove_many(tx.as_mut(), ids).await?; - Ok(()) - } - - async fn fetch_bulk_snapshot( - &self, - remote_project_id: Uuid, - ) -> Result { - Ok(self - .remote_client - .fetch_bulk_snapshot(remote_project_id) - .await?) - } -} diff --git a/crates/services/src/services/share/publisher.rs b/crates/services/src/services/share/publisher.rs index e2799770..0b176c9a 100644 --- a/crates/services/src/services/share/publisher.rs +++ b/crates/services/src/services/share/publisher.rs @@ -1,14 +1,16 @@ use db::{ DBService, - models::{project::Project, shared_task::SharedTask, task::Task}, + models::{ + project::Project, + task::{CreateTask, Task, TaskStatus}, + }, }; use remote::routes::tasks::{ - AssignSharedTaskRequest, CreateSharedTaskRequest, DeleteSharedTaskRequest, SharedTaskResponse, - UpdateSharedTaskRequest, + AssignSharedTaskRequest, CreateSharedTaskRequest, SharedTaskResponse, UpdateSharedTaskRequest, }; use uuid::Uuid; -use super::{ShareError, convert_remote_task, status}; +use super::{ShareError, status}; use crate::services::remote_client::RemoteClient; #[derive(Clone)] @@ -17,6 +19,15 @@ pub struct SharePublisher { client: RemoteClient, } +#[derive(Debug, Clone, serde::Deserialize, serde::Serialize, ts_rs::TS)] +pub struct SharedTaskDetails { + pub id: Uuid, + pub project_id: Uuid, + pub title: String, + pub description: Option, + pub status: TaskStatus, +} + impl SharePublisher { pub fn new(db: DBService, client: RemoteClient) -> Self { Self { db, client } @@ -47,7 +58,7 @@ impl SharePublisher { let remote_task = self.client.create_shared_task(&payload).await?; - self.sync_shared_task(&task, &remote_task).await?; + Task::set_shared_task_id(&self.db.pool, task.id, Some(remote_task.task.id)).await?; Ok(remote_task.task.id) } @@ -61,16 +72,12 @@ impl SharePublisher { title: Some(task.title.clone()), description: task.description.clone(), status: Some(status::to_remote(&task.status)), - version: None, }; - let remote_task = self - .client + self.client .update_shared_task(shared_task_id, &payload) .await?; - self.sync_shared_task(task, &remote_task).await?; - Ok(()) } @@ -84,10 +91,9 @@ impl SharePublisher { pub async fn assign_shared_task( &self, - shared_task: 
&SharedTask, + shared_task_id: Uuid, new_assignee_user_id: Option, - version: Option, - ) -> Result { + ) -> Result { let assignee_uuid = new_assignee_user_id .map(|id| uuid::Uuid::parse_str(&id)) .transpose() @@ -95,62 +101,104 @@ impl SharePublisher { let payload = AssignSharedTaskRequest { new_assignee_user_id: assignee_uuid, - version, }; - let SharedTaskResponse { - task: remote_task, - user, - } = self + let response = self .client - .assign_shared_task(shared_task.id, &payload) + .assign_shared_task(shared_task_id, &payload) .await?; - let input = convert_remote_task(&remote_task, user.as_ref(), None); - let record = SharedTask::upsert(&self.db.pool, input).await?; - Ok(record) + Ok(response) } pub async fn delete_shared_task(&self, shared_task_id: Uuid) -> Result<(), ShareError> { - let shared_task = SharedTask::find_by_id(&self.db.pool, shared_task_id) - .await? - .ok_or(ShareError::TaskNotFound(shared_task_id))?; - - let payload = DeleteSharedTaskRequest { - version: Some(shared_task.version), - }; - - self.client - .delete_shared_task(shared_task.id, &payload) - .await?; + self.client.delete_shared_task(shared_task_id).await?; if let Some(local_task) = - Task::find_by_shared_task_id(&self.db.pool, shared_task.id).await? + Task::find_by_shared_task_id(&self.db.pool, shared_task_id).await? { Task::set_shared_task_id(&self.db.pool, local_task.id, None).await?; } - SharedTask::remove(&self.db.pool, shared_task.id).await?; Ok(()) } - async fn sync_shared_task( + pub async fn link_shared_task( &self, - task: &Task, - remote_task: &SharedTaskResponse, - ) -> Result<(), ShareError> { - let SharedTaskResponse { - task: remote_task, - user, - } = remote_task; + shared_task: SharedTaskDetails, + ) -> Result, ShareError> { + if let Some(task) = Task::find_by_shared_task_id(&self.db.pool, shared_task.id).await? { + return Ok(Some(task)); + } - Project::find_by_id(&self.db.pool, task.project_id) + if !self.shared_task_exists(shared_task.id).await? { + return Ok(None); + } + + let create_task = CreateTask::from_shared_task( + shared_task.project_id, + shared_task.title, + shared_task.description, + shared_task.status, + shared_task.id, + ); + + let id = Uuid::new_v4(); + let task = Task::create(&self.db.pool, &create_task, id).await?; + + Ok(Some(task)) + } + + async fn shared_task_exists(&self, shared_task_id: Uuid) -> Result { + Ok(self + .client + .check_tasks(vec![shared_task_id]) .await? 
- .ok_or(ShareError::ProjectNotFound(task.project_id))?; + .contains(&shared_task_id)) + } + + pub async fn cleanup_shared_tasks(&self) -> Result<(), ShareError> { + let tasks = Task::find_all_shared(&self.db.pool).await?; + if tasks.is_empty() { + return Ok(()); + } + + let shared_ids: Vec = tasks.iter().filter_map(|t| t.shared_task_id).collect(); + + if shared_ids.is_empty() { + return Ok(()); + } + + // Verify in chunks of 100 to avoid hitting payload limits + for chunk in shared_ids.chunks(100) { + let existing_ids = match self.client.check_tasks(chunk.to_vec()).await { + Ok(ids) => ids, + Err(e) => { + tracing::warn!("Failed to check task existence: {}", e); + continue; + } + }; + + let existing_set: std::collections::HashSet = existing_ids.into_iter().collect(); + + let missing_ids: Vec = chunk + .iter() + .filter(|id| !existing_set.contains(id)) + .cloned() + .collect(); + + if !missing_ids.is_empty() { + tracing::info!( + "Unlinking ({}) shared tasks that no longer exist in remote", + missing_ids.len() + ); + + if let Err(e) = Task::batch_unlink_shared_tasks(&self.db.pool, &missing_ids).await { + tracing::error!("Failed to batch unlink tasks: {}", e); + } + } + } - let input = convert_remote_task(remote_task, user.as_ref(), None); - SharedTask::upsert(&self.db.pool, input).await?; - Task::set_shared_task_id(&self.db.pool, task.id, Some(remote_task.id)).await?; Ok(()) } } diff --git a/crates/services/src/services/share/status.rs b/crates/services/src/services/share/status.rs index a614698b..e66a8997 100644 --- a/crates/services/src/services/share/status.rs +++ b/crates/services/src/services/share/status.rs @@ -10,13 +10,3 @@ pub(super) fn to_remote(status: &TaskStatus) -> RemoteTaskStatus { TaskStatus::Cancelled => RemoteTaskStatus::Cancelled, } } - -pub(super) fn from_remote(status: &RemoteTaskStatus) -> TaskStatus { - match status { - RemoteTaskStatus::Todo => TaskStatus::Todo, - RemoteTaskStatus::InProgress => TaskStatus::InProgress, - RemoteTaskStatus::InReview => TaskStatus::InReview, - RemoteTaskStatus::Done => TaskStatus::Done, - RemoteTaskStatus::Cancelled => TaskStatus::Cancelled, - } -} diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index c88b8582..67a4d479 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -28,15 +28,12 @@ tokio = { workspace = true } futures = "0.3.31" tokio-stream = { version = "0.1.17", features = ["sync"] } tokio-tungstenite = { version = "0.28", features = ["rustls-tls-webpki-roots", "url"] } -async-stream = "0.3" shellexpand = "3.1.1" which = "8.0.0" similar = "2" git2 = "0.18" dirs = "5.0" -async-trait = { workspace = true } thiserror = { workspace = true } -dashmap = "6.1" url = "2.5" reqwest = { version = "0.12", features = ["json"] } sqlx = { version = "0.8.6", default-features = false, features = ["postgres", "uuid", "chrono"] } diff --git a/crates/utils/src/jwt.rs b/crates/utils/src/jwt.rs index 4f96f6bd..1ccf170b 100644 --- a/crates/utils/src/jwt.rs +++ b/crates/utils/src/jwt.rs @@ -2,6 +2,7 @@ use chrono::{DateTime, Utc}; use jsonwebtoken::dangerous::insecure_decode; use serde::Deserialize; use thiserror::Error; +use uuid::Uuid; #[derive(Debug, Error)] pub enum TokenClaimsError { @@ -11,6 +12,10 @@ pub enum TokenClaimsError { MissingExpiration, #[error("invalid `exp` value `{0}`")] InvalidExpiration(i64), + #[error("missing `sub` claim in token")] + MissingSubject, + #[error("invalid `sub` value: {0}")] + InvalidSubject(String), } #[derive(Debug, Deserialize)] @@ -18,9 +23,21 @@ struct ExpClaim { exp: 
Option<i64>,
 }
 
+#[derive(Debug, Deserialize)]
+struct SubClaim {
+    sub: Option<String>,
+}
+
 /// Extract the expiration timestamp from a JWT without verifying its signature.
 pub fn extract_expiration(token: &str) -> Result<DateTime<Utc>, TokenClaimsError> {
     let data = insecure_decode::<ExpClaim>(token)?;
     let exp = data.claims.exp.ok_or(TokenClaimsError::MissingExpiration)?;
     DateTime::from_timestamp(exp, 0).ok_or(TokenClaimsError::InvalidExpiration(exp))
 }
+
+/// Extract the subject (user ID) from a JWT without verifying its signature.
+pub fn extract_subject(token: &str) -> Result<Uuid, TokenClaimsError> {
+    let data = insecure_decode::<SubClaim>(token)?;
+    let sub = data.claims.sub.ok_or(TokenClaimsError::MissingSubject)?;
+    Uuid::parse_str(&sub).map_err(|_| TokenClaimsError::InvalidSubject(sub))
+}
diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs
index bad90be9..8d2b0a1e 100644
--- a/crates/utils/src/lib.rs
+++ b/crates/utils/src/lib.rs
@@ -20,7 +20,6 @@ pub mod stream_lines;
 pub mod text;
 pub mod tokio;
 pub mod version;
-pub mod ws;
 
 /// Cache for WSL2 detection result
 static WSL2_CACHE: OnceLock<bool> = OnceLock::new();
diff --git a/crates/utils/src/ws.rs b/crates/utils/src/ws.rs
deleted file mode 100644
index 648369b7..00000000
--- a/crates/utils/src/ws.rs
+++ /dev/null
@@ -1,229 +0,0 @@
-use std::{sync::Arc, time::Duration};
-
-use axum::http::{self, HeaderName, HeaderValue};
-use futures::future::BoxFuture;
-use futures_util::{SinkExt, StreamExt};
-use thiserror::Error;
-use tokio::sync::{mpsc, watch};
-use tokio_tungstenite::{
-    connect_async,
-    tungstenite::{client::IntoClientRequest, protocol::Message},
-};
-use url::Url;
-
-/// Interval between authentication refresh probes for websocket connections.
-pub const WS_AUTH_REFRESH_INTERVAL: Duration = Duration::from_secs(30);
-/// Grace period to tolerate expired tokens while a websocket client refreshes its session.
-pub const WS_TOKEN_EXPIRY_GRACE: Duration = Duration::from_secs(120);
-/// Maximum time allowed between REST catch-up and websocket connection establishment.
-pub const WS_MAX_DELAY_BETWEEN_CATCHUP_AND_WS: Duration = WS_TOKEN_EXPIRY_GRACE;
-/// Maximum backlog accepted before forcing clients to do a full bulk sync.
-pub const WS_BULK_SYNC_THRESHOLD: u32 = 500;
-
-pub type HeaderFuture = BoxFuture<'static, WsResult<Vec<(HeaderName, HeaderValue)>>>;
-pub type HeaderFactory = Arc<dyn Fn() -> HeaderFuture + Send + Sync>;
-
-#[derive(Error, Debug)]
-pub enum WsError {
-    #[error("WebSocket connection error: {0}")]
-    Connection(#[from] tokio_tungstenite::tungstenite::Error),
-
-    #[error("IO error: {0}")]
-    Io(#[from] std::io::Error),
-
-    #[error("Send error: {0}")]
-    Send(String),
-
-    #[error("Handler error: {0}")]
-    Handler(#[from] Box<dyn std::error::Error + Send + Sync>),
-
-    #[error("Shutdown channel closed unexpectedly")]
-    ShutdownChannelClosed,
-
-    #[error("failed to build websocket request: {0}")]
-    Request(#[from] http::Error),
-
-    #[error("failed to prepare websocket headers: {0}")]
-    Header(String),
-
-    #[error("share authentication missing or expired")]
-    MissingAuth,
-}
-
-pub type WsResult<T> = std::result::Result<T, WsError>;
-
-#[async_trait::async_trait]
-pub trait WsHandler: Send + Sync + 'static {
-    /// Called when a new `Message` is received.
-    async fn handle_message(&mut self, msg: Message) -> WsResult<()>;
-
-    /// Called when the socket is closed (either remote closed or error).
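As an aside on the extract_subject helper added above: the same unverified-claims read can be sketched in TypeScript by splitting the token and base64url-decoding the payload (Node's Buffer is assumed; like the Rust helper, this must never be used for authentication decisions, only as a hint):

// Unverified JWT subject extraction, analogous to extract_subject above.
function extractSubject(token: string): string {
  const parts = token.split('.');
  if (parts.length !== 3) throw new Error('malformed JWT');
  // Decode the payload segment only; the signature is deliberately ignored.
  const payload = Buffer.from(parts[1], 'base64url').toString('utf8');
  const claims = JSON.parse(payload) as { sub?: string };
  if (!claims.sub) throw new Error('missing `sub` claim in token');
  return claims.sub;
}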
- async fn on_close(&mut self) -> WsResult<()>; -} - -pub struct WsConfig { - pub url: Url, - pub ping_interval: Option, - pub header_factory: Option, -} - -#[derive(Clone)] -pub struct WsClient { - msg_tx: mpsc::UnboundedSender, - cancelation_token: watch::Sender<()>, -} - -impl WsClient { - pub fn send(&self, msg: Message) -> WsResult<()> { - self.msg_tx - .send(msg) - .map_err(|e| WsError::Send(format!("WebSocket send error: {e}"))) - } - - pub fn close(&self) -> WsResult<()> { - self.cancelation_token - .send(()) - .map_err(|_| WsError::ShutdownChannelClosed) - } - - pub fn subscribe_close(&self) -> watch::Receiver<()> { - self.cancelation_token.subscribe() - } -} - -/// Launches a WebSocket connection with read/write tasks. -/// Returns a `WsClient` which you can use to send messages or request shutdown. -pub async fn run_ws_client(mut handler: H, config: WsConfig) -> WsResult -where - H: WsHandler, -{ - let (msg_tx, mut msg_rx) = mpsc::unbounded_channel(); - let (cancel_tx, cancel_rx) = watch::channel(()); - let task_tx = msg_tx.clone(); - - tokio::spawn(async move { - tracing::debug!(url = %config.url, "WebSocket connecting"); - let request = match build_request(&config).await { - Ok(req) => req, - Err(err) => { - tracing::error!(?err, "failed to build websocket request"); - return; - } - }; - - match connect_async(request).await { - Ok((ws_stream, _resp)) => { - tracing::info!("WebSocket connected"); - - let (mut ws_sink, mut ws_stream) = ws_stream.split(); - - let ping_task = if let Some(interval) = config.ping_interval { - let mut intv = tokio::time::interval(interval); - let mut cancel_rx2 = cancel_rx.clone(); - let ping_tx2 = task_tx.clone(); - Some(tokio::spawn(async move { - loop { - tokio::select! { - _ = intv.tick() => { - if ping_tx2.send(Message::Ping(Vec::new().into())).is_err() { break; } - } - _ = cancel_rx2.changed() => { break; } - } - } - })) - } else { - None - }; - - loop { - let mut cancel_rx2 = cancel_rx.clone(); - tokio::select! 
{ - maybe = msg_rx.recv() => { - match maybe { - Some(msg) => { - if let Err(err) = ws_sink.send(msg).await { - tracing::error!("WebSocket send failed: {:?}", err); - break; - } - } - None => { - tracing::debug!("WebSocket msg_rx closed"); - break; - } - } - } - - incoming = ws_stream.next() => { - match incoming { - Some(Ok(msg)) => { - if let Err(err) = handler.handle_message(msg).await { - tracing::error!("WsHandler failed: {:?}", err); - break; - } - } - Some(Err(err)) => { - tracing::error!("WebSocket stream error: {:?}", err); - break; - } - None => { - tracing::debug!("WebSocket stream ended"); - break; - } - } - } - - _ = cancel_rx2.changed() => { - tracing::debug!("WebSocket shutdown requested"); - break; - } - } - } - - if let Err(err) = handler.on_close().await { - tracing::error!("WsHandler on_close failed: {:?}", err); - } - - if let Err(err) = ws_sink.close().await { - tracing::error!("WebSocket close failed: {:?}", err); - } - - if let Some(task) = ping_task { - task.abort(); - } - } - Err(err) => { - tracing::error!("WebSocket connect error: {:?}", err); - } - } - - tracing::info!("WebSocket client task exiting"); - }); - - Ok(WsClient { - msg_tx, - cancelation_token: cancel_tx, - }) -} - -async fn build_request(config: &WsConfig) -> WsResult> { - let mut request = config.url.clone().into_client_request()?; - if let Some(factory) = &config.header_factory { - let headers = factory().await?; - for (name, value) in headers { - request.headers_mut().insert(name, value); - } - } - - Ok(request) -} - -pub fn derive_ws_url(mut base: Url) -> Result { - match base.scheme() { - "https" => base.set_scheme("wss").unwrap(), - "http" => base.set_scheme("ws").unwrap(), - _ => { - return Err(url::ParseError::RelativeUrlWithoutBase); - } - } - Ok(base) -} diff --git a/frontend/package.json b/frontend/package.json index 8ba40f39..c07fe743 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -44,6 +44,8 @@ "@sentry/react": "^9.34.0", "@sentry/vite-plugin": "^3.5.0", "@tailwindcss/typography": "^0.5.16", + "@tanstack/electric-db-collection": "^0.2.6", + "@tanstack/react-db": "^0.1.50", "@tanstack/react-devtools": "^0.8.0", "@tanstack/react-form": "^1.23.8", "@tanstack/react-form-devtools": "^0.1.8", @@ -78,6 +80,7 @@ "tailwind-merge": "^2.2.0", "tailwindcss-animate": "^1.0.7", "vibe-kanban-web-companion": "^0.0.4", + "wa-sqlite": "^1.0.0", "zod": "^4.1.12", "zustand": "^4.5.4" }, diff --git a/frontend/src/components/ConfigProvider.tsx b/frontend/src/components/ConfigProvider.tsx index 6c58af56..16f071d8 100644 --- a/frontend/src/components/ConfigProvider.tsx +++ b/frontend/src/components/ConfigProvider.tsx @@ -5,8 +5,8 @@ import { useContext, useEffect, useMemo, - useState, } from 'react'; +import { useQuery, useQueryClient } from '@tanstack/react-query'; import { type Config, type Environment, @@ -63,47 +63,26 @@ interface UserSystemProviderProps { } export function UserSystemProvider({ children }: UserSystemProviderProps) { - // Split state for performance - independent re-renders - const [config, setConfig] = useState(null); - const [environment, setEnvironment] = useState(null); - const [profiles, setProfiles] = useState | null>(null); - const [capabilities, setCapabilities] = useState | null>(null); - const [analyticsUserId, setAnalyticsUserId] = useState(null); - const [loginStatus, setLoginStatus] = useState(null); - const [loading, setLoading] = useState(true); + const queryClient = useQueryClient(); - useEffect(() => { - const loadUserSystem = async () => { - try { - 
const userSystemInfo: UserSystemInfo = await configApi.getConfig(); - setConfig(userSystemInfo.config); - setEnvironment(userSystemInfo.environment); - setAnalyticsUserId(userSystemInfo.analytics_user_id); - setLoginStatus(userSystemInfo.login_status); - setProfiles( - userSystemInfo.executors as Record | null - ); - setCapabilities( - (userSystemInfo.capabilities || null) as Record< - string, - BaseAgentCapability[] - > | null - ); - } catch (err) { - console.error('Error loading user system:', err); - } finally { - setLoading(false); - } - }; + const { data: userSystemInfo, isLoading } = useQuery({ + queryKey: ['user-system'], + queryFn: configApi.getConfig, + staleTime: 5 * 60 * 1000, // 5 minutes + }); - loadUserSystem(); - }, []); + const config = userSystemInfo?.config || null; + const environment = userSystemInfo?.environment || null; + const analyticsUserId = userSystemInfo?.analytics_user_id || null; + const loginStatus = userSystemInfo?.login_status || null; + const profiles = + (userSystemInfo?.executors as Record | null) || + null; + const capabilities = + (userSystemInfo?.capabilities as Record< + string, + BaseAgentCapability[] + > | null) || null; // Sync language with i18n when config changes useEffect(() => { @@ -112,9 +91,18 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { } }, [config?.language]); - const updateConfig = useCallback((updates: Partial) => { - setConfig((prev) => (prev ? { ...prev, ...updates } : null)); - }, []); + const updateConfig = useCallback( + (updates: Partial) => { + queryClient.setQueryData(['user-system'], (old) => { + if (!old) return old; + return { + ...old, + config: { ...old.config, ...updates }, + }; + }); + }, + [queryClient] + ); const saveConfig = useCallback(async (): Promise => { if (!config) return false; @@ -129,48 +117,66 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { const updateAndSaveConfig = useCallback( async (updates: Partial): Promise => { - setLoading(true); - const newConfig: Config | null = config - ? 
{ ...config, ...updates } - : null; + if (!config) return false; + + const newConfig = { ...config, ...updates }; + updateConfig(updates); + try { - if (!newConfig) return false; const saved = await configApi.saveConfig(newConfig); - setConfig(saved); + queryClient.setQueryData(['user-system'], (old) => { + if (!old) return old; + return { + ...old, + config: saved, + }; + }); return true; } catch (err) { console.error('Error saving config:', err); + queryClient.invalidateQueries({ queryKey: ['user-system'] }); return false; - } finally { - setLoading(false); } }, - [config] + [config, queryClient, updateConfig] ); const reloadSystem = useCallback(async () => { - setLoading(true); - try { - const userSystemInfo: UserSystemInfo = await configApi.getConfig(); - setConfig(userSystemInfo.config); - setEnvironment(userSystemInfo.environment); - setAnalyticsUserId(userSystemInfo.analytics_user_id); - setLoginStatus(userSystemInfo.login_status); - setProfiles( - userSystemInfo.executors as Record | null - ); - setCapabilities( - (userSystemInfo.capabilities || null) as Record< - string, - BaseAgentCapability[] - > | null - ); - } catch (err) { - console.error('Error reloading user system:', err); - } finally { - setLoading(false); - } - }, []); + await queryClient.invalidateQueries({ queryKey: ['user-system'] }); + }, [queryClient]); + + const setEnvironment = useCallback( + (env: Environment | null) => { + queryClient.setQueryData(['user-system'], (old) => { + if (!old || !env) return old; + return { ...old, environment: env }; + }); + }, + [queryClient] + ); + + const setProfiles = useCallback( + (newProfiles: Record | null) => { + queryClient.setQueryData(['user-system'], (old) => { + if (!old || !newProfiles) return old; + return { + ...old, + executors: newProfiles as unknown as UserSystemInfo['executors'], + }; + }); + }, + [queryClient] + ); + + const setCapabilities = useCallback( + (newCapabilities: Record | null) => { + queryClient.setQueryData(['user-system'], (old) => { + if (!old || !newCapabilities) return old; + return { ...old, capabilities: newCapabilities }; + }); + }, + [queryClient] + ); // Memoize context value to prevent unnecessary re-renders const value = useMemo( @@ -196,7 +202,7 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { setProfiles, setCapabilities, reloadSystem, - loading, + loading: isLoading, }), [ config, @@ -209,7 +215,10 @@ export function UserSystemProvider({ children }: UserSystemProviderProps) { saveConfig, updateAndSaveConfig, reloadSystem, - loading, + isLoading, + setEnvironment, + setProfiles, + setCapabilities, ] ); diff --git a/frontend/src/components/dialogs/global/OAuthDialog.tsx b/frontend/src/components/dialogs/global/OAuthDialog.tsx index 852adb7e..d98436dc 100644 --- a/frontend/src/components/dialogs/global/OAuthDialog.tsx +++ b/frontend/src/components/dialogs/global/OAuthDialog.tsx @@ -107,7 +107,7 @@ const OAuthDialogImpl = NiceModal.create(() => { setState({ type: 'success', profile: statusData.profile }); setTimeout(() => { modal.resolve(statusData.profile); - modal.hide(); + modal.remove(); }, 1500); } }, [statusData, isPolling, modal, reloadSystem]); @@ -129,7 +129,7 @@ const OAuthDialogImpl = NiceModal.create(() => { } setState({ type: 'select' }); modal.resolve(null); - modal.hide(); + modal.remove(); }; const handleBack = () => { diff --git a/frontend/src/components/dialogs/tasks/ReassignDialog.tsx b/frontend/src/components/dialogs/tasks/ReassignDialog.tsx index 48720f21..4d809be1 100644 --- 
a/frontend/src/components/dialogs/tasks/ReassignDialog.tsx +++ b/frontend/src/components/dialogs/tasks/ReassignDialog.tsx @@ -93,10 +93,9 @@ const ReassignDialogImpl = NiceModal.create( mutationFn: async (newAssignee: string) => tasksApi.reassign(sharedTask.id, { new_assignee_user_id: newAssignee, - version: sharedTask.version, }), - onSuccess: (result) => { - modal.resolve(result.shared_task); + onSuccess: (shared_task) => { + modal.resolve(shared_task); modal.hide(); }, onError: (error) => { diff --git a/frontend/src/components/dialogs/tasks/ShareDialog.tsx b/frontend/src/components/dialogs/tasks/ShareDialog.tsx index 2907b293..4fd9a0a3 100644 --- a/frontend/src/components/dialogs/tasks/ShareDialog.tsx +++ b/frontend/src/components/dialogs/tasks/ShareDialog.tsx @@ -33,13 +33,14 @@ const ShareDialogImpl = NiceModal.create(({ task }) => { const { isSignedIn } = useAuth(); const { project } = useProject(); const { shareTask } = useTaskMutations(task.project_id); + const { reset: resetShareTask } = shareTask; const [shareError, setShareError] = useState(null); useEffect(() => { - shareTask.reset(); + resetShareTask(); setShareError(null); - }, [task.id, shareTask]); + }, [task.id, resetShareTask]); const handleClose = () => { modal.resolve(shareTask.isSuccess); @@ -68,7 +69,13 @@ const ShareDialogImpl = NiceModal.create(({ task }) => { modal.hide(); } catch (err) { if (getStatus(err) === 401) { - void OAuthDialog.show(); + // Hide this dialog first so OAuthDialog appears on top + modal.hide(); + const result = await OAuthDialog.show(); + // If user successfully authenticated, re-show this dialog + if (result) { + void ShareDialog.show({ task }); + } return; } setShareError(getReadableError(err)); diff --git a/frontend/src/components/tasks/TaskCard.tsx b/frontend/src/components/tasks/TaskCard.tsx index 46b694b4..0171b1f8 100644 --- a/frontend/src/components/tasks/TaskCard.tsx +++ b/frontend/src/components/tasks/TaskCard.tsx @@ -10,6 +10,7 @@ import { attemptsApi } from '@/lib/api'; import type { SharedTaskRecord } from '@/hooks/useProjectTasks'; import { TaskCardHeader } from './TaskCardHeader'; import { useTranslation } from 'react-i18next'; +import { useAuth } from '@/hooks'; type Task = TaskWithAttemptStatus; @@ -35,6 +36,7 @@ export function TaskCard({ const { t } = useTranslation('tasks'); const navigate = useNavigateWithSearch(); const [isNavigatingToParent, setIsNavigatingToParent] = useState(false); + const { isSignedIn } = useAuth(); const handleClick = useCallback(() => { onViewDetails(task); @@ -87,8 +89,9 @@ export function TaskCard({ onClick={handleClick} isOpen={isOpen} forwardedRef={localRef} + dragDisabled={(!!sharedTask || !!task.shared_task_id) && !isSignedIn} className={ - sharedTask + sharedTask || task.shared_task_id ? 
'relative overflow-hidden pl-5 before:absolute before:left-0 before:top-0 before:bottom-0 before:w-[3px] before:bg-card-foreground before:content-[""]' : undefined } diff --git a/frontend/src/components/ui/actions-dropdown.tsx b/frontend/src/components/ui/actions-dropdown.tsx index 9f9576af..3207290c 100644 --- a/frontend/src/components/ui/actions-dropdown.tsx +++ b/frontend/src/components/ui/actions-dropdown.tsx @@ -42,11 +42,12 @@ export function ActionsDropdown({ const { projectId } = useProject(); const openInEditor = useOpenInEditor(attempt?.id); const navigate = useNavigate(); - const { userId } = useAuth(); + const { userId, isSignedIn } = useAuth(); const hasAttemptActions = Boolean(attempt); const hasTaskActions = Boolean(task); const isShared = Boolean(sharedTask); + const canEditShared = (!isShared && !task?.shared_task_id) || isSignedIn; const handleEdit = (e: React.MouseEvent) => { e.stopPropagation(); @@ -248,14 +249,17 @@ export function ActionsDropdown({ {t('actionsMenu.stopShare')} - + {t('common:buttons.edit')} {t('actionsMenu.duplicate')} diff --git a/frontend/src/hooks/auth/useAuthStatus.ts b/frontend/src/hooks/auth/useAuthStatus.ts index 01ced359..74a65952 100644 --- a/frontend/src/hooks/auth/useAuthStatus.ts +++ b/frontend/src/hooks/auth/useAuthStatus.ts @@ -1,12 +1,14 @@ import { useQuery } from '@tanstack/react-query'; import { oauthApi } from '@/lib/api'; +import { useEffect } from 'react'; +import { useAuth } from '@/hooks'; interface UseAuthStatusOptions { enabled: boolean; } export function useAuthStatus(options: UseAuthStatusOptions) { - return useQuery({ + const query = useQuery({ queryKey: ['auth', 'status'], queryFn: () => oauthApi.status(), enabled: options.enabled, @@ -14,4 +16,13 @@ export function useAuthStatus(options: UseAuthStatusOptions) { retry: 3, staleTime: 0, // Always fetch fresh data when enabled }); + + const { isSignedIn } = useAuth(); + useEffect(() => { + if (query) { + query.refetch(); + } + }, [isSignedIn, query]); + + return query; } diff --git a/frontend/src/hooks/auth/useCurrentUser.ts b/frontend/src/hooks/auth/useCurrentUser.ts new file mode 100644 index 00000000..2701bf4b --- /dev/null +++ b/frontend/src/hooks/auth/useCurrentUser.ts @@ -0,0 +1,23 @@ +import { useQuery, useQueryClient } from '@tanstack/react-query'; +import { oauthApi } from '@/lib/api'; +import { useEffect } from 'react'; +import { useAuth } from '@/hooks/auth/useAuth'; + +export function useCurrentUser() { + const { isSignedIn } = useAuth(); + const query = useQuery({ + queryKey: ['auth', 'user'], + queryFn: () => oauthApi.getCurrentUser(), + retry: 2, + staleTime: 5 * 60 * 1000, // 5 minutes + refetchOnWindowFocus: false, + refetchOnReconnect: false, + }); + + const queryClient = useQueryClient(); + useEffect(() => { + queryClient.invalidateQueries({ queryKey: ['auth', 'user'] }); + }, [queryClient, isSignedIn]); + + return query; +} diff --git a/frontend/src/hooks/index.ts b/frontend/src/hooks/index.ts index 7317f83e..ad8da327 100644 --- a/frontend/src/hooks/index.ts +++ b/frontend/src/hooks/index.ts @@ -22,6 +22,7 @@ export { useTaskAttempts } from './useTaskAttempts'; export { useAuth } from './auth/useAuth'; export { useAuthMutations } from './auth/useAuthMutations'; export { useAuthStatus } from './auth/useAuthStatus'; +export { useCurrentUser } from './auth/useCurrentUser'; export { useUserOrganizations } from './useUserOrganizations'; export { useOrganizationSelection } from './useOrganizationSelection'; export { useOrganizationMembers } from 
'./useOrganizationMembers';
diff --git a/frontend/src/hooks/useAssigneeUserName.ts b/frontend/src/hooks/useAssigneeUserName.ts
new file mode 100644
index 00000000..1e50d7f2
--- /dev/null
+++ b/frontend/src/hooks/useAssigneeUserName.ts
@@ -0,0 +1,38 @@
+import { useQuery } from '@tanstack/react-query';
+import { getSharedTaskAssignees } from '@/lib/remoteApi';
+import type { SharedTask, UserData } from 'shared/types';
+import { useEffect, useMemo } from 'react';
+
+interface UseAssigneeUserNamesOptions {
+  projectId: string | undefined;
+  sharedTasks?: SharedTask[];
+}
+
+export function useAssigneeUserNames(options: UseAssigneeUserNamesOptions) {
+  const { projectId, sharedTasks } = options;
+
+  const { data: assignees, refetch } = useQuery({
+    queryKey: ['project', 'assignees', projectId],
+    queryFn: () => getSharedTaskAssignees(projectId!),
+    enabled: Boolean(projectId),
+    staleTime: 5 * 60 * 1000, // 5 minutes
+  });
+
+  const assignedUserIds = useMemo(() => {
+    if (!sharedTasks) return null;
+    return Array.from(
+      new Set(sharedTasks.map((task) => task.assignee_user_id))
+    );
+  }, [sharedTasks]);
+
+  // Refetch when assignee ids change
+  useEffect(() => {
+    if (!assignedUserIds) return;
+    refetch();
+  }, [assignedUserIds, refetch]);
+
+  return {
+    assignees,
+    refetchAssignees: refetch,
+  };
+}
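The hook above keeps the assignee query fresh by memoizing the derived id set and refetching imperatively when it changes. An equivalent shape folds that set into the query key so TanStack Query refetches automatically; a sketch with a hypothetical fetcher:

// Alternative to the manual refetch effect: derive a stable key from the id set.
import { useQuery } from '@tanstack/react-query';

declare function fetchAssignees(projectId: string): Promise<unknown[]>; // hypothetical

export function useAssigneesByIds(projectId: string, assigneeIds: string[]) {
  const idsKey = [...assigneeIds].sort().join(','); // stable key for the id set
  return useQuery({
    queryKey: ['project', 'assignees', projectId, idsKey],
    queryFn: () => fetchAssignees(projectId),
    enabled: assigneeIds.length > 0,
  });
}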
diff --git a/frontend/src/hooks/useAutoLinkSharedTasks.ts b/frontend/src/hooks/useAutoLinkSharedTasks.ts
new file mode 100644
index 00000000..955fc6db
--- /dev/null
+++ b/frontend/src/hooks/useAutoLinkSharedTasks.ts
@@ -0,0 +1,85 @@
+import { useEffect, useRef } from 'react';
+import { useCurrentUser } from '@/hooks/auth/useCurrentUser';
+import { useTaskMutations } from '@/hooks/useTaskMutations';
+import type { SharedTaskRecord } from './useProjectTasks';
+import type { SharedTaskDetails, TaskWithAttemptStatus } from 'shared/types';
+
+interface UseAutoLinkSharedTasksProps {
+  sharedTasksById: Record<string, SharedTaskRecord>;
+  localTasksById: Record<string, TaskWithAttemptStatus>;
+  referencedSharedIds: Set<string>;
+  isLoading: boolean;
+  remoteProjectId?: string;
+  projectId?: string;
+}
+
+/**
+ * Automatically links shared tasks that are assigned to the current user
+ * and don't have a corresponding local task yet.
+ */
+export function useAutoLinkSharedTasks({
+  sharedTasksById,
+  localTasksById,
+  referencedSharedIds,
+  isLoading,
+  remoteProjectId,
+  projectId,
+}: UseAutoLinkSharedTasksProps): void {
+  const { data: currentUser } = useCurrentUser();
+  const { linkSharedTaskToLocal } = useTaskMutations(projectId);
+  const linkingInProgress = useRef<Set<string>>(new Set());
+  const failedTasks = useRef<Set<string>>(new Set());
+
+  useEffect(() => {
+    if (!currentUser?.user_id || isLoading || !remoteProjectId || !projectId) {
+      return;
+    }
+
+    const tasksToLink = Object.values(sharedTasksById).filter((task) => {
+      const isAssignedToCurrentUser =
+        task.assignee_user_id === currentUser.user_id;
+      const hasLocalTask = Boolean(localTasksById[task.id]);
+      const isAlreadyLinked = referencedSharedIds.has(task.id);
+      const isBeingLinked = linkingInProgress.current.has(task.id);
+      const hasFailed = failedTasks.current.has(task.id);
+
+      return (
+        isAssignedToCurrentUser &&
+        !hasLocalTask &&
+        !isAlreadyLinked &&
+        !isBeingLinked &&
+        !hasFailed
+      );
+    });
+
+    tasksToLink.forEach((task) => {
+      linkingInProgress.current.add(task.id);
+      linkSharedTaskToLocal.mutate(
+        {
+          id: task.id,
+          project_id: projectId,
+          title: task.title,
+          description: task.description,
+          status: task.status,
+        } as SharedTaskDetails,
+        {
+          onError: () => {
+            failedTasks.current.add(task.id);
+          },
+          onSettled: () => {
+            linkingInProgress.current.delete(task.id);
+          },
+        }
+      );
+    });
+  }, [
+    currentUser?.user_id,
+    sharedTasksById,
+    localTasksById,
+    referencedSharedIds,
+    isLoading,
+    remoteProjectId,
+    projectId,
+    linkSharedTaskToLocal,
+  ]);
+}
diff --git a/frontend/src/hooks/useProjectTasks.ts b/frontend/src/hooks/useProjectTasks.ts
index 98a7c286..7ff5df94 100644
--- a/frontend/src/hooks/useProjectTasks.ts
+++ b/frontend/src/hooks/useProjectTasks.ts
@@ -1,20 +1,19 @@
 import { useCallback, useMemo } from 'react';
 import { useJsonPatchWsStream } from './useJsonPatchWsStream';
+import { useAuth } from '@/hooks';
 import { useProject } from '@/contexts/ProjectContext';
+import { useLiveQuery, eq, isNull } from '@tanstack/react-db';
+import { sharedTasksCollection } from '@/lib/electric/sharedTasksCollection';
+import { useAssigneeUserNames } from './useAssigneeUserName';
+import { useAutoLinkSharedTasks } from './useAutoLinkSharedTasks';
 import type {
   SharedTask,
   TaskStatus,
   TaskWithAttemptStatus,
 } from 'shared/types';
 
-export type SharedTaskRecord = Omit<
-  SharedTask,
-  'version' | 'last_event_seq'
-> & {
-  version: number;
-  last_event_seq: number | null;
-  created_at: string | Date;
-  updated_at: string | Date;
+export type SharedTaskRecord = SharedTask & {
+  remote_project_id: string;
   assignee_first_name?: string | null;
   assignee_last_name?: string | null;
   assignee_username?: string | null;
@@ -22,7 +21,6 @@
 
 type TasksState = {
   tasks: Record<string, TaskWithAttemptStatus>;
-  shared_tasks: Record<string, SharedTaskRecord>;
 };
 
 export interface UseProjectTasksResult {
@@ -43,14 +41,12 @@
  */
 export const useProjectTasks = (projectId: string): UseProjectTasksResult => {
   const { project } = useProject();
+  const { isSignedIn } = useAuth();
   const remoteProjectId = project?.remote_project_id;
 
-  const endpoint = `/api/tasks/stream/ws?project_id=${encodeURIComponent(projectId)}&remote_project_id=${encodeURIComponent(remoteProjectId ??
'null')}`; + const endpoint = `/api/tasks/stream/ws?project_id=${encodeURIComponent(projectId)}`; - const initialData = useCallback( - (): TasksState => ({ tasks: {}, shared_tasks: {} }), - [] - ); + const initialData = useCallback((): TasksState => ({ tasks: {} }), []); const { data, isConnected, error } = useJsonPatchWsStream( endpoint, @@ -58,12 +54,67 @@ export const useProjectTasks = (projectId: string): UseProjectTasksResult => { initialData ); - const localTasksById = useMemo(() => data?.tasks ?? {}, [data?.tasks]); - const sharedTasksById = useMemo( - () => data?.shared_tasks ?? {}, - [data?.shared_tasks] + const sharedTasksQuery = useLiveQuery( + useCallback( + (q) => { + if (!remoteProjectId || !isSignedIn) { + return undefined; + } + return q + .from({ sharedTasks: sharedTasksCollection }) + .where(({ sharedTasks }) => + eq(sharedTasks.project_id, remoteProjectId) + ) + .where(({ sharedTasks }) => isNull(sharedTasks.deleted_at)); + }, + [remoteProjectId, isSignedIn] + ), + [remoteProjectId, isSignedIn] ); + const sharedTasksList = useMemo( + () => sharedTasksQuery.data ?? [], + [sharedTasksQuery.data] + ); + + const localTasksById = useMemo(() => data?.tasks ?? {}, [data?.tasks]); + + const referencedSharedIds = useMemo( + () => + new Set( + Object.values(localTasksById) + .map((task) => task.shared_task_id) + .filter((id): id is string => Boolean(id)) + ), + [localTasksById] + ); + + const { assignees } = useAssigneeUserNames({ + projectId: remoteProjectId || undefined, + sharedTasks: sharedTasksList, + }); + + const sharedTasksById = useMemo(() => { + if (!sharedTasksList) return {}; + const map: Record = {}; + const list = Array.isArray(sharedTasksList) ? sharedTasksList : []; + for (const task of list) { + const assignee = + task.assignee_user_id && assignees + ? assignees.find((a) => a.user_id === task.assignee_user_id) + : null; + map[task.id] = { + ...task, + status: task.status, + remote_project_id: task.project_id, + assignee_first_name: assignee?.first_name ?? null, + assignee_last_name: assignee?.last_name ?? null, + assignee_username: assignee?.username ?? 
null, + }; + } + return map; + }, [sharedTasksList, assignees]); + const { tasks, tasksById, tasksByStatus } = useMemo(() => { const merged: Record = { ...localTasksById }; const byStatus: Record = { @@ -104,12 +155,6 @@ export const useProjectTasks = (projectId: string): UseProjectTasksResult => { cancelled: [], }; - const referencedSharedIds = new Set( - Object.values(localTasksById) - .map((task) => task.shared_task_id) - .filter((id): id is string => Boolean(id)) - ); - Object.values(sharedTasksById).forEach((sharedTask) => { const hasLocal = Boolean(localTasksById[sharedTask.id]) || @@ -130,10 +175,20 @@ export const useProjectTasks = (projectId: string): UseProjectTasksResult => { }); return grouped; - }, [localTasksById, sharedTasksById]); + }, [localTasksById, sharedTasksById, referencedSharedIds]); const isLoading = !data && !error; // until first snapshot + // Auto-link shared tasks assigned to current user + useAutoLinkSharedTasks({ + sharedTasksById, + localTasksById, + referencedSharedIds, + isLoading, + remoteProjectId: project?.remote_project_id || undefined, + projectId, + }); + return { tasks, tasksById, diff --git a/frontend/src/hooks/useTaskMutations.ts b/frontend/src/hooks/useTaskMutations.ts index dc7b8b17..7e5fbaba 100644 --- a/frontend/src/hooks/useTaskMutations.ts +++ b/frontend/src/hooks/useTaskMutations.ts @@ -9,16 +9,18 @@ import type { Task, TaskWithAttemptStatus, UpdateTask, + SharedTaskDetails, } from 'shared/types'; +import { taskKeys } from './useTask'; export function useTaskMutations(projectId?: string) { const queryClient = useQueryClient(); const navigate = useNavigateWithSearch(); const invalidateQueries = (taskId?: string) => { - queryClient.invalidateQueries({ queryKey: ['tasks', projectId] }); + queryClient.invalidateQueries({ queryKey: taskKeys.all }); if (taskId) { - queryClient.invalidateQueries({ queryKey: ['task', taskId] }); + queryClient.invalidateQueries({ queryKey: taskKeys.byId(taskId) }); } }; @@ -107,6 +109,19 @@ export function useTaskMutations(projectId?: string) { }, }); + const linkSharedTaskToLocal = useMutation({ + mutationFn: (data: SharedTaskDetails) => tasksApi.linkToLocal(data), + onSuccess: (createdTask: Task | null) => { + console.log('Linked shared task to local successfully', createdTask); + if (createdTask) { + invalidateQueries(createdTask.id); + } + }, + onError: (err) => { + console.error('Failed to link shared task to local:', err); + }, + }); + return { createTask, createAndStart, @@ -114,5 +129,6 @@ export function useTaskMutations(projectId?: string) { deleteTask, shareTask, stopShareTask: unshareSharedTask, + linkSharedTaskToLocal, }; } diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 49f19a0a..c32bf53e 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -26,7 +26,6 @@ import { Tag, TagSearchParams, TaskWithAttemptStatus, - AssignSharedTaskResponse, UpdateProject, UpdateTask, UpdateTag, @@ -74,10 +73,14 @@ import { CreateScratch, UpdateScratch, PushError, + TokenResponse, + CurrentUserResponse, + SharedTaskResponse, + SharedTaskDetails, QueueStatus, } from 'shared/types'; -class ApiError extends Error { +export class ApiError extends Error { public status?: number; public error_data?: E; @@ -149,7 +152,9 @@ const handleApiResponseAsResult = async ( return { success: true, data: result.data as T }; }; -const handleApiResponse = async (response: Response): Promise => { +export const handleApiResponse = async ( + response: Response +): Promise => { if (!response.ok) { let 
errorMessage = `Request failed with status ${response.status}`; @@ -372,11 +377,10 @@ export const tasksApi = { reassign: async ( sharedTaskId: string, - data: { new_assignee_user_id: string | null; version?: number | null } - ): Promise<AssignSharedTaskResponse> => { + data: { new_assignee_user_id: string | null } + ): Promise<SharedTaskResponse> => { const payload = { new_assignee_user_id: data.new_assignee_user_id, - version: data.version ?? null, }; const response = await makeRequest( @@ -387,7 +391,7 @@ } ); - return handleApiResponse<AssignSharedTaskResponse>(response); + return handleApiResponse<SharedTaskResponse>(response); }, unshare: async (sharedTaskId: string): Promise<void> => { @@ -396,6 +400,14 @@ }); return handleApiResponse<void>(response); }, + + linkToLocal: async (data: SharedTaskDetails): Promise<Task | null> => { + const response = await makeRequest(`/api/shared-tasks/link-to-local`, { + method: 'POST', + body: JSON.stringify(data), + }); + return handleApiResponse<Task | null>(response); + }, }; // Task Attempts APIs @@ -650,7 +662,7 @@ // Config APIs (backwards compatible) export const configApi = { getConfig: async (): Promise<UserSystemInfo> => { - const response = await makeRequest('/api/info'); + const response = await makeRequest('/api/info', { cache: 'no-store' }); return handleApiResponse<UserSystemInfo>(response); }, saveConfig: async (config: Config): Promise<Config> => { @@ -893,7 +905,9 @@ }, status: async (): Promise<LoginStatus> => { - const response = await makeRequest('/api/auth/status'); + const response = await makeRequest('/api/auth/status', { + cache: 'no-store', + }); return handleApiResponse<LoginStatus>(response); }, @@ -909,6 +923,19 @@ ); } }, + + /** Returns the current access token for the remote server (auto-refreshes if needed) */ + getToken: async (): Promise<TokenResponse | null> => { + const response = await makeRequest('/api/auth/token'); + if (!response.ok) return null; + return handleApiResponse<TokenResponse>(response); + }, + + /** Returns the user ID of the currently authenticated user */ + getCurrentUser: async (): Promise<CurrentUserResponse> => { + const response = await makeRequest('/api/auth/user'); + return handleApiResponse<CurrentUserResponse>(response); + }, }; // Organizations API diff --git a/frontend/src/lib/electric/config.ts b/frontend/src/lib/electric/config.ts new file mode 100644 index 00000000..e31d165b --- /dev/null +++ b/frontend/src/lib/electric/config.ts @@ -0,0 +1,15 @@ +import { oauthApi } from '../api'; +import { REMOTE_API_URL } from '@/lib/remoteApi'; + +export const createAuthenticatedShapeOptions = (table: string) => ({ + url: `${REMOTE_API_URL}/v1/shape/${table}`, + headers: { + Authorization: async () => { + const tokenResponse = await oauthApi.getToken(); + return tokenResponse ?
`Bearer ${tokenResponse.access_token}` : ''; + }, + }, + parser: { + timestamptz: (value: string) => value, + }, +}); diff --git a/frontend/src/lib/electric/sharedTasksCollection.ts b/frontend/src/lib/electric/sharedTasksCollection.ts new file mode 100644 index 00000000..94a394db --- /dev/null +++ b/frontend/src/lib/electric/sharedTasksCollection.ts @@ -0,0 +1,13 @@ +import { createCollection } from '@tanstack/react-db'; +import { electricCollectionOptions } from '@tanstack/electric-db-collection'; +import type { SharedTask } from 'shared/types'; + +import { createAuthenticatedShapeOptions } from './config'; + +export const sharedTasksCollection = createCollection( + electricCollectionOptions<SharedTask>({ + id: 'shared_tasks', + getKey: (task) => task.id, + shapeOptions: createAuthenticatedShapeOptions('shared_tasks'), + }) +); diff --git a/frontend/src/lib/remoteApi.ts b/frontend/src/lib/remoteApi.ts new file mode 100644 index 00000000..1701a2d1 --- /dev/null +++ b/frontend/src/lib/remoteApi.ts @@ -0,0 +1,47 @@ +import { oauthApi, ApiError } from './api'; +import { UserData, AssigneesQuery } from 'shared/types'; + +export const REMOTE_API_URL = import.meta.env.VITE_VK_SHARED_API_BASE || ''; + +const makeRequest = async (path: string, options: RequestInit = {}) => { + const tokenRes = await oauthApi.getToken(); + if (!tokenRes?.access_token) { + throw new Error('Not authenticated'); + } + + const headers = new Headers(options.headers ?? {}); + if (!headers.has('Content-Type')) { + headers.set('Content-Type', 'application/json'); + } + headers.set('Authorization', `Bearer ${tokenRes.access_token}`); + + console.log('VITE_VK_SHARED_API_BASE:', REMOTE_API_URL); + + return fetch(`${REMOTE_API_URL}${path}`, { + ...options, + headers, + credentials: 'include', + }); +}; + +export const getSharedTaskAssignees = async ( + projectId: string +): Promise<UserData[]> => { + const response = await makeRequest( + `/v1/tasks/assignees?${new URLSearchParams({ + project_id: projectId, + } as AssigneesQuery)}` + ); + + if (!response.ok) { + let message = `Request failed with status ${response.status}`; + try { + const err = await response.json(); + if (err?.message) message = err.message; + } catch { + // empty + } + throw new ApiError(message, response.status, response); + } + return response.json(); +}; diff --git a/frontend/src/pages/ProjectTasks.tsx b/frontend/src/pages/ProjectTasks.tsx index 6dc36f7d..aa237411 100644 --- a/frontend/src/pages/ProjectTasks.tsx +++ b/frontend/src/pages/ProjectTasks.tsx @@ -447,9 +447,6 @@ export function ProjectTasks() { const getTimestamp = (item: KanbanColumnItem) => { const createdAt = item.type === 'task' ?
item.task.created_at : item.task.created_at; - if (createdAt instanceof Date) { - return createdAt.getTime(); - } return new Date(createdAt).getTime(); }; diff --git a/frontend/src/pages/settings/OrganizationSettings.tsx b/frontend/src/pages/settings/OrganizationSettings.tsx index d6b4d5dc..fb74d65b 100644 --- a/frontend/src/pages/settings/OrganizationSettings.tsx +++ b/frontend/src/pages/settings/OrganizationSettings.tsx @@ -143,15 +143,10 @@ export function OrganizationSettings() { const { data: remoteProjects = [], isLoading: loadingRemoteProjects } = useOrganizationProjects(selectedOrgId); - // Calculate available local projects (not linked to any remote project in this org) - const remoteProjectIds = remoteProjects.map((rp) => rp.id); - const availableLocalProjects = allProjects.filter((project) => { - // Project is available if it has no remote link OR if it's linked to a project outside this org - return ( - !project.remote_project_id || - !remoteProjectIds.includes(project.remote_project_id) - ); - }); + // Calculate available local projects (not linked to any remote project) + const availableLocalProjects = allProjects.filter( + (project) => !project.remote_project_id + ); // Project mutations const { linkToExisting, unlinkProject } = useProjectMutations({ diff --git a/frontend/src/styles/index.css b/frontend/src/styles/index.css index 2c040160..b49031f8 100644 --- a/frontend/src/styles/index.css +++ b/frontend/src/styles/index.css @@ -28,7 +28,7 @@ /* Status (light) */ --_success: 142.1 76.2% 36.3%; - --_success-foreground: 138.5 76.5% 96.7%; + --_success-foreground: 142.1 76.2% 20%; --_warning: 32.2 95% 44.1%; --_warning-foreground: 26 83.3% 14.1%; --_info: 217.2 91.2% 59.8%; diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index f0ae9a29..27e73ca8 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -68,12 +68,15 @@ export default defineConfig({ target: `http://localhost:${process.env.BACKEND_PORT || "3001"}`, changeOrigin: true, ws: true, - }, + } }, fs: { allow: [path.resolve(__dirname, "."), path.resolve(__dirname, "..")], }, open: process.env.VITE_OPEN === "true", }, + optimizeDeps: { + exclude: ["wa-sqlite"], + }, build: { sourcemap: true }, }); diff --git a/frontend_server.log b/frontend_server.log new file mode 100644 index 00000000..e69de29b diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e24553bf..52f211a2 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -13,7 +13,7 @@ importers: version: 8.2.2 vite: specifier: ^6.3.5 - version: 6.3.5(jiti@1.21.7)(yaml@2.8.0) + version: 6.3.5(@types/node@24.10.1)(jiti@1.21.7)(yaml@2.8.0) frontend: dependencies: @@ -104,6 +104,12 @@ importers: '@tailwindcss/typography': specifier: ^0.5.16 version: 0.5.16(tailwindcss@3.4.17) + '@tanstack/electric-db-collection': + specifier: ^0.2.6 + version: 0.2.6(typescript@5.9.2) + '@tanstack/react-db': + specifier: ^0.1.50 + version: 0.1.50(react@18.3.1)(typescript@5.9.2) '@tanstack/react-devtools': specifier: ^0.8.0 version: 0.8.0(@types/react-dom@18.3.7(@types/react@18.3.23))(@types/react@18.3.23)(csstype@3.1.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(solid-js@1.9.10) @@ -206,6 +212,9 @@ importers: vibe-kanban-web-companion: specifier: ^0.0.4 version: 0.0.4(@types/react@18.3.23)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + wa-sqlite: + specifier: ^1.0.0 + version: 1.0.0 zod: specifier: ^4.1.12 version: 4.1.12 @@ -242,7 +251,7 @@ importers: version: 6.21.0(eslint@8.57.1)(typescript@5.9.2) '@vitejs/plugin-react': specifier: ^4.2.1 - version: 
4.5.2(vite@5.4.19) + version: 4.5.2(vite@5.4.19(@types/node@24.10.1)) autoprefixer: specifier: ^10.4.16 version: 10.4.21(postcss@8.5.6) @@ -287,7 +296,7 @@ importers: version: 5.9.2 vite: specifier: ^5.0.8 - version: 5.4.19 + version: 5.4.19(@types/node@24.10.1) remote-frontend: dependencies: @@ -309,7 +318,7 @@ importers: version: 18.3.7(@types/react@18.3.23) '@vitejs/plugin-react': specifier: ^4.2.1 - version: 4.5.2(vite@5.4.19) + version: 4.5.2(vite@5.4.19(@types/node@24.10.1)) autoprefixer: specifier: ^10.4.16 version: 10.4.21(postcss@8.5.6) @@ -324,7 +333,7 @@ importers: version: 5.9.2 vite: specifier: ^5.0.8 - version: 5.4.19 + version: 5.4.19(@types/node@24.10.1) packages: @@ -474,6 +483,9 @@ packages: react: '>16.8.0' react-dom: '>16.8.0' + '@electric-sql/client@1.2.0': + resolution: {integrity: sha512-K/MEjti3UF4aPKJJqO6Tp4f5noqc2/3icU1NPdpKfQaHwbzGtEX2aJmL2vxTEUJbfyrISkPKbOPnrz/lAvw1Vg==} + '@esbuild/aix-ppc64@0.21.5': resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} engines: {node: '>=12'} @@ -971,6 +983,9 @@ packages: '@marijn/find-cluster-break@1.0.2': resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==} + '@microsoft/fetch-event-source@2.0.1': + resolution: {integrity: sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA==} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -1796,6 +1811,9 @@ packages: peerDependencies: solid-js: ^1.6.12 + '@standard-schema/spec@1.0.0': + resolution: {integrity: sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==} + '@tailwindcss/container-queries@0.1.1': resolution: {integrity: sha512-p18dswChx6WnTSaJCSGx6lTmrGzNNvm2FtXmiO6AuA1V4U5REyoqwmT6kgAsIMdjo07QdAfYXHJ4hnMtfHzWgA==} peerDependencies: @@ -1806,6 +1824,16 @@ packages: peerDependencies: tailwindcss: '>=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1' + '@tanstack/db-ivm@0.1.13': + resolution: {integrity: sha512-sBOWGY4tqMEym2ewjdWrDb5c5c8akvgnEbGVPAtkfFS3QVV0zfVb5RJAkAc8GSxb3ByVfYjyaShVr0kMJhMuow==} + peerDependencies: + typescript: '>=4.7' + + '@tanstack/db@0.5.6': + resolution: {integrity: sha512-K6fYKCVp0abDoRDO+RScfmFOf00KNBtJXusFH+bBSc0LsbDxAjScph4fsX2pl6RPF74sh98IAQ6df2znlN09Iw==} + peerDependencies: + typescript: '>=4.7' + '@tanstack/devtools-client@0.0.4': resolution: {integrity: sha512-LefnH9KE9uRDEWifc3QDcooskA8ikfs41bybDTgpYQpyTUspZnaEdUdya9Hry0KYxZ8nos0S3nNbsP79KHqr6Q==} engines: {node: '>=18'} @@ -1830,6 +1858,9 @@ packages: peerDependencies: solid-js: '>=1.9.7' + '@tanstack/electric-db-collection@0.2.6': + resolution: {integrity: sha512-3J5mLxQ+PWwAxwsJy3Paygfd8dwUcs/oy1RxWjkIowRkYc1JA9x2/KSzX7ovzmYWbD87juHeLb73leuaWmqhow==} + '@tanstack/form-core@1.24.4': resolution: {integrity: sha512-+eIR7DiDamit1zvTVgaHxuIRA02YFgJaXMUGxsLRJoBpUjGl/g/nhUocQoNkRyfXqOlh8OCMTanjwDprWSRq6w==} @@ -1838,6 +1869,10 @@ packages: peerDependencies: solid-js: '>=1.9.9' + '@tanstack/pacer-lite@0.1.0': + resolution: {integrity: sha512-a5A0PI0H4npUy7u3VOjOhdynXnRBna+mDvpt8ghDCVzS3Tgn8DlGzHlRqS2rKJP8ZcLuVO2qxlIIblhcoaiv8Q==} + engines: {node: '>=18'} + '@tanstack/pacer@0.15.4': resolution: {integrity: sha512-vGY+CWsFZeac3dELgB6UZ4c7OacwsLb8hvL2gLS6hTgy8Fl0Bm/aLokHaeDIP+q9F9HUZTnp360z9uv78eg8pg==} engines: {node: '>=18'} @@ -1845,6 +1880,11 @@ 
packages: '@tanstack/query-core@5.85.5': resolution: {integrity: sha512-KO0WTob4JEApv69iYp1eGvfMSUkgw//IpMnq+//cORBzXf0smyRwPLrUvEe5qtAEGjwZTXrjxg+oJNP/C00t6w==} + '@tanstack/react-db@0.1.50': + resolution: {integrity: sha512-0AVEJlgLJKj1eW+Gfva1rGj9xFuk29RM0Cr+i8JpReN6gmlb/Uol68WJQ+CUfxHz6sPFoqPhJUywLRC9Q0SVWg==} + peerDependencies: + react: '>=16.8.0' + '@tanstack/react-devtools@0.8.0': resolution: {integrity: sha512-0TsFICBPr68us3iWHFXCIBSEilTo8j1OdIJLW48LNQNjC/Puno82uqX4qFuaZWfZv6K37QnS6UeRxzWJItMFSA==} engines: {node: '>=18'} @@ -1882,6 +1922,9 @@ packages: '@tanstack/store@0.7.7': resolution: {integrity: sha512-xa6pTan1bcaqYDS9BDpSiS63qa6EoDkPN9RsRaxHuDdVDNntzq3xNwR5YKTU/V3SkSyC9T4YVOPh2zRQN0nhIQ==} + '@tanstack/store@0.8.0': + resolution: {integrity: sha512-Om+BO0YfMZe//X2z0uLF2j+75nQga6TpTJgLJQBiq85aOyZNIhkCgleNcud2KQg4k4v9Y9l+Uhru3qWMPGTOzQ==} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -1906,6 +1949,9 @@ packages: '@types/lodash@4.17.20': resolution: {integrity: sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA==} + '@types/node@24.10.1': + resolution: {integrity: sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==} + '@types/prop-types@15.7.15': resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} @@ -2241,6 +2287,15 @@ packages: supports-color: optional: true + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + decode-formdata@0.9.0: resolution: {integrity: sha512-q5uwOjR3Um5YD+ZWPOF/1sGHVW9A5rCrRwITQChRXlmPkxDFBqCm4jNTIVdGHNH9OnR+V9MoZVgRhsFb+ARbUw==} @@ -2488,6 +2543,10 @@ packages: fraction.js@4.3.7: resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} + fractional-indexing@3.2.0: + resolution: {integrity: sha512-PcOxmqwYCW7O2ovKRU8OoQQj2yqTfEB/yeTYk4gPid6dN5ODRfU1hXd9tTVZzax/0NkO7AxpHykvZnT1aYp/BQ==} + engines: {node: ^14.13.1 || >=16.0.0} + framer-motion@12.23.24: resolution: {integrity: sha512-HMi5HRoRCTou+3fb3h9oTLyJGBxHfW+HnNE25tAXOvVx/IvwMHK0cx7IR4a2ZU6sh3IX1Z+4ts32PcYBOqka8w==} peerDependencies: @@ -3257,6 +3316,9 @@ packages: solid-js@1.9.10: resolution: {integrity: sha512-Coz956cos/EPDlhs6+jsdTxKuJDPT7B5SVIWgABwROyxjY7Xbr8wkzD68Et+NxnV7DLJ3nJdAC2r9InuV/4Jew==} + sorted-btree@1.8.1: + resolution: {integrity: sha512-395+XIP+wqNn3USkFSrNz7G3Ss/MXlZEqesxvzCRFwL14h6e8LukDHdLBePn5pwbm5OQ9vGu8mDyz2lLDIqamQ==} + source-map-js@1.2.1: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} @@ -3377,6 +3439,9 @@ packages: engines: {node: '>=14.17'} hasBin: true + undici-types@7.16.0: + resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + unplugin@1.0.1: resolution: {integrity: sha512-aqrHaVBWW1JVKBHmGo33T5TxeL0qWzfvjWokObHA9bYmN7eNDkwOxmLjhioHl9878qDFMAaT51XNroRyuz7WxA==} @@ -3423,6 +3488,11 @@ packages: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + use-sync-external-store@1.6.0: + resolution: {integrity: 
sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} @@ -3528,6 +3598,9 @@ packages: w3c-keyname@2.2.8: resolution: {integrity: sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==} + wa-sqlite@1.0.0: + resolution: {integrity: sha512-Kyybo5/BaJp76z7gDWGk2J6Hthl4NIPsE+swgraEjy3IY6r5zIR02wAs1OJH4XtJp1y3puj3Onp5eMGS0z7nUA==} + web-vitals@4.2.4: resolution: {integrity: sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==} @@ -3831,6 +3904,12 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) + '@electric-sql/client@1.2.0': + dependencies: + '@microsoft/fetch-event-source': 2.0.1 + optionalDependencies: + '@rollup/rollup-darwin-arm64': 4.44.0 + '@esbuild/aix-ppc64@0.21.5': optional: true @@ -4316,6 +4395,8 @@ snapshots: '@marijn/find-cluster-break@1.0.2': {} + '@microsoft/fetch-event-source@2.0.1': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -5174,6 +5255,8 @@ snapshots: dependencies: solid-js: 1.9.10 + '@standard-schema/spec@1.0.0': {} + '@tailwindcss/container-queries@0.1.1(tailwindcss@3.4.17)': dependencies: tailwindcss: 3.4.17 @@ -5186,6 +5269,19 @@ snapshots: postcss-selector-parser: 6.0.10 tailwindcss: 3.4.17 + '@tanstack/db-ivm@0.1.13(typescript@5.9.2)': + dependencies: + fractional-indexing: 3.2.0 + sorted-btree: 1.8.1 + typescript: 5.9.2 + + '@tanstack/db@0.5.6(typescript@5.9.2)': + dependencies: + '@standard-schema/spec': 1.0.0 + '@tanstack/db-ivm': 0.1.13(typescript@5.9.2) + '@tanstack/pacer-lite': 0.1.0 + typescript: 5.9.2 + '@tanstack/devtools-client@0.0.4': dependencies: '@tanstack/devtools-event-client': 0.3.4 @@ -5223,6 +5319,17 @@ snapshots: - csstype - utf-8-validate + '@tanstack/electric-db-collection@0.2.6(typescript@5.9.2)': + dependencies: + '@electric-sql/client': 1.2.0 + '@standard-schema/spec': 1.0.0 + '@tanstack/db': 0.5.6(typescript@5.9.2) + '@tanstack/store': 0.8.0 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + - typescript + '@tanstack/form-core@1.24.4': dependencies: '@tanstack/devtools-event-client': 0.3.4 @@ -5240,6 +5347,8 @@ snapshots: transitivePeerDependencies: - csstype + '@tanstack/pacer-lite@0.1.0': {} + '@tanstack/pacer@0.15.4': dependencies: '@tanstack/devtools-event-client': 0.3.4 @@ -5247,6 +5356,14 @@ snapshots: '@tanstack/query-core@5.85.5': {} + '@tanstack/react-db@0.1.50(react@18.3.1)(typescript@5.9.2)': + dependencies: + '@tanstack/db': 0.5.6(typescript@5.9.2) + react: 18.3.1 + use-sync-external-store: 1.6.0(react@18.3.1) + transitivePeerDependencies: + - typescript + '@tanstack/react-devtools@0.8.0(@types/react-dom@18.3.7(@types/react@18.3.23))(@types/react@18.3.23)(csstype@3.1.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(solid-js@1.9.10)': dependencies: '@tanstack/devtools': 0.8.0(csstype@3.1.3)(solid-js@1.9.10) @@ -5292,6 +5409,8 @@ snapshots: '@tanstack/store@0.7.7': {} + '@tanstack/store@0.8.0': {} + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.27.5 @@ -5323,6 +5442,11 @@ snapshots: '@types/lodash@4.17.20': {} + '@types/node@24.10.1': + dependencies: + undici-types: 7.16.0 + optional: true + '@types/prop-types@15.7.15': {} '@types/react-dom@18.3.7(@types/react@18.3.23)': @@ -5468,7 +5592,7 @@ snapshots: react: 18.3.1 
react-dom: 18.3.1(react@18.3.1) - '@vitejs/plugin-react@4.5.2(vite@5.4.19)': + '@vitejs/plugin-react@4.5.2(vite@5.4.19(@types/node@24.10.1))': dependencies: '@babel/core': 7.27.4 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.27.4) @@ -5476,7 +5600,7 @@ snapshots: '@rolldown/pluginutils': 1.0.0-beta.11 '@types/babel__core': 7.20.5 react-refresh: 0.17.0 - vite: 5.4.19 + vite: 5.4.19(@types/node@24.10.1) transitivePeerDependencies: - supports-color @@ -5709,6 +5833,10 @@ snapshots: dependencies: ms: 2.1.3 + debug@4.4.3: + dependencies: + ms: 2.1.3 + decode-formdata@0.9.0: {} deep-is@0.1.4: {} @@ -5997,6 +6125,8 @@ snapshots: fraction.js@4.3.7: {} + fractional-indexing@3.2.0: {} + framer-motion@12.23.24(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: motion-dom: 12.23.23 @@ -6666,6 +6796,8 @@ snapshots: seroval: 1.3.2 seroval-plugins: 1.3.3(seroval@1.3.2) + sorted-btree@1.8.1: {} + source-map-js@1.2.1: {} spawn-command@0.0.2: {} @@ -6794,6 +6926,9 @@ snapshots: typescript@5.9.2: {} + undici-types@7.16.0: + optional: true + unplugin@1.0.1: dependencies: acorn: 8.15.0 @@ -6836,6 +6971,10 @@ snapshots: dependencies: react: 18.3.1 + use-sync-external-store@1.6.0(react@18.3.1): + dependencies: + react: 18.3.1 + util-deprecate@1.0.2: {} uuid@13.0.0: {} @@ -6865,15 +7004,16 @@ snapshots: - '@types/react' - react-dom - vite@5.4.19: + vite@5.4.19(@types/node@24.10.1): dependencies: esbuild: 0.21.5 postcss: 8.5.6 rollup: 4.44.0 optionalDependencies: + '@types/node': 24.10.1 fsevents: 2.3.3 - vite@6.3.5(jiti@1.21.7)(yaml@2.8.0): + vite@6.3.5(@types/node@24.10.1)(jiti@1.21.7)(yaml@2.8.0): dependencies: esbuild: 0.25.5 fdir: 6.4.6(picomatch@4.0.2) @@ -6882,6 +7022,7 @@ snapshots: rollup: 4.44.0 tinyglobby: 0.2.14 optionalDependencies: + '@types/node': 24.10.1 fsevents: 2.3.3 jiti: 1.21.7 yaml: 2.8.0 @@ -6890,6 +7031,8 @@ snapshots: w3c-keyname@2.2.8: {} + wa-sqlite@1.0.0: {} + web-vitals@4.2.4: {} webidl-conversions@3.0.1: {} diff --git a/shared/types.ts b/shared/types.ts index f36c1e04..44952000 100644 --- a/shared/types.ts +++ b/shared/types.ts @@ -4,9 +4,13 @@ // If you are an AI, and you absolutely have to edit this file, please confirm with the user first. 
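The shared/types.ts changes below regenerate the ts-rs bindings: the old snapshot-based SharedTask (with remote_project_id, assignee_* and version fields) is replaced by the remote crate's row shape, and assignee names are now resolved client-side. As a minimal sketch of how the regenerated SharedTask type, the Electric collection, and the live query fit together — assuming useLiveQuery, eq, and isNull are exported from '@tanstack/react-db', since the hook's import lines are not part of this diff:

import { useLiveQuery, eq, isNull } from '@tanstack/react-db';
import { sharedTasksCollection } from '@/lib/electric/sharedTasksCollection';

// Live view of one remote project's shared tasks. Rows stream from the
// remote /v1/shape/shared_tasks endpoint; the Authorization header is
// resolved per request through oauthApi.getToken() (see config.ts above).
export function useSharedTasks(remoteProjectId: string | null) {
  const { data } = useLiveQuery(
    (q) =>
      remoteProjectId
        ? q
            .from({ sharedTasks: sharedTasksCollection })
            .where(({ sharedTasks }) => eq(sharedTasks.project_id, remoteProjectId))
            .where(({ sharedTasks }) => isNull(sharedTasks.deleted_at))
        : undefined,
    [remoteProjectId]
  );
  return data ?? [];
}

Returning undefined from the query callback (as useProjectTasks does above) keeps the subscription idle until the project is linked and the user is signed in.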
-export type DirectoryEntry = { name: string, path: string, is_directory: boolean, is_git_repo: boolean, last_modified: bigint | null, }; +export type SharedTaskResponse = { task: SharedTask, user: UserData | null, }; -export type DirectoryListResponse = { entries: Array, current_path: string, }; +export type AssigneesQuery = { project_id: string, }; + +export type SharedTask = { id: string, organization_id: string, project_id: string, creator_user_id: string | null, assignee_user_id: string | null, deleted_by_user_id: string | null, title: string, description: string | null, status: TaskStatus, deleted_at: string | null, shared_at: string | null, created_at: string, updated_at: string, }; + +export type UserData = { user_id: string, first_name: string | null, last_name: string | null, username: string | null, }; export type Project = { id: string, name: string, git_repo_path: string, setup_script: string | null, dev_script: string | null, cleanup_script: string | null, copy_files: string | null, remote_project_id: string | null, created_at: Date, updated_at: Date, }; @@ -18,34 +22,12 @@ export type SearchResult = { path: string, is_file: boolean, match_type: SearchM export type SearchMatchType = "FileName" | "DirectoryName" | "FullPath"; -export type CreateRemoteProjectRequest = { organization_id: string, name: string, }; - -export type LinkToExistingRequest = { remote_project_id: string, }; - -export type ExecutorAction = { typ: ExecutorActionType, next_action: ExecutorAction | null, }; - -export type McpConfig = { servers: { [key in string]?: JsonValue }, servers_path: Array, template: JsonValue, preconfigured: JsonValue, is_toml_config: boolean, }; - -export type ExecutorActionType = { "type": "CodingAgentInitialRequest" } & CodingAgentInitialRequest | { "type": "CodingAgentFollowUpRequest" } & CodingAgentFollowUpRequest | { "type": "ScriptRequest" } & ScriptRequest; - -export type ScriptContext = "SetupScript" | "CleanupScript" | "DevServer" | "ToolInstallScript"; - -export type ScriptRequest = { script: string, language: ScriptRequestLanguage, context: ScriptContext, }; - -export type ScriptRequestLanguage = "Bash"; - -export enum BaseCodingAgent { CLAUDE_CODE = "CLAUDE_CODE", AMP = "AMP", GEMINI = "GEMINI", CODEX = "CODEX", OPENCODE = "OPENCODE", CURSOR_AGENT = "CURSOR_AGENT", QWEN_CODE = "QWEN_CODE", COPILOT = "COPILOT", DROID = "DROID" } - -export type CodingAgent = { "CLAUDE_CODE": ClaudeCode } | { "AMP": Amp } | { "GEMINI": Gemini } | { "CODEX": Codex } | { "OPENCODE": Opencode } | { "CURSOR_AGENT": CursorAgent } | { "QWEN_CODE": QwenCode } | { "COPILOT": Copilot } | { "DROID": Droid }; - export type Tag = { id: string, tag_name: string, content: string, created_at: string, updated_at: string, }; export type CreateTag = { tag_name: string, content: string, }; export type UpdateTag = { tag_name: string | null, content: string | null, }; -export type TagSearchParams = { search: string | null, }; - export type TaskStatus = "todo" | "inprogress" | "inreview" | "done" | "cancelled"; export type Task = { id: string, project_id: string, title: string, description: string | null, status: TaskStatus, parent_task_attempt: string | null, shared_task_id: string | null, created_at: string, updated_at: string, }; @@ -58,8 +40,6 @@ export type CreateTask = { project_id: string, title: string, description: strin export type UpdateTask = { title: string | null, description: string | null, status: TaskStatus | null, parent_task_attempt: string | null, image_ids: Array | null, }; -export type 
SharedTask = { id: string, remote_project_id: string, title: string, description: string | null, status: TaskStatus, assignee_user_id: string | null, assignee_first_name: string | null, assignee_last_name: string | null, assignee_username: string | null, version: bigint, last_event_seq: bigint | null, created_at: Date, updated_at: Date, }; - export type DraftFollowUpData = { message: string, variant: string | null, }; export type ScratchPayload = { "type": "DRAFT_TASK", "data": string } | { "type": "DRAFT_FOLLOW_UP", "data": DraftFollowUpData }; @@ -72,26 +52,60 @@ export type CreateScratch = { payload: ScratchPayload, }; export type UpdateScratch = { payload: ScratchPayload, }; -export type QueuedMessage = { -/** - * The task attempt this message is queued for - */ -task_attempt_id: string, -/** - * The follow-up data (message + variant) - */ -data: DraftFollowUpData, -/** - * Timestamp when the message was queued - */ -queued_at: string, }; - -export type QueueStatus = { "status": "empty" } | { "status": "queued", message: QueuedMessage, }; - export type Image = { id: string, file_path: string, original_name: string, mime_type: string | null, size_bytes: bigint, hash: string, created_at: string, updated_at: string, }; export type CreateImage = { file_path: string, original_name: string, mime_type: string | null, size_bytes: bigint, hash: string, }; +export type TaskAttempt = { id: string, task_id: string, container_ref: string | null, branch: string, target_branch: string, executor: string, worktree_deleted: boolean, setup_completed_at: string | null, created_at: string, updated_at: string, }; + +export type ExecutionProcess = { id: string, task_attempt_id: string, run_reason: ExecutionProcessRunReason, executor_action: ExecutorAction, +/** + * Git HEAD commit OID captured before the process starts + */ +before_head_commit: string | null, +/** + * Git HEAD commit OID captured after the process ends + */ +after_head_commit: string | null, status: ExecutionProcessStatus, exit_code: bigint | null, +/** + * dropped: true if this process is excluded from the current + * history view (due to restore/trimming). Hidden from logs/timeline; + * still listed in the Processes tab. 
+ */ +dropped: boolean, started_at: string, completed_at: string | null, created_at: string, updated_at: string, }; + +export enum ExecutionProcessStatus { running = "running", completed = "completed", failed = "failed", killed = "killed" } + +export type ExecutionProcessRunReason = "setupscript" | "cleanupscript" | "codingagent" | "devserver"; + +export type Merge = { "type": "direct" } & DirectMerge | { "type": "pr" } & PrMerge; + +export type DirectMerge = { id: string, task_attempt_id: string, merge_commit: string, target_branch_name: string, created_at: string, }; + +export type PrMerge = { id: string, task_attempt_id: string, created_at: string, target_branch_name: string, pr_info: PullRequestInfo, }; + +export type MergeStatus = "open" | "merged" | "closed" | "unknown"; + +export type PullRequestInfo = { number: bigint, url: string, status: MergeStatus, merged_at: string | null, merge_commit_sha: string | null, }; + +export type ApprovalStatus = { "status": "pending" } | { "status": "approved" } | { "status": "denied", reason?: string, } | { "status": "timed_out" }; + +export type CreateApprovalRequest = { tool_name: string, tool_input: JsonValue, tool_call_id: string, }; + +export type ApprovalResponse = { execution_process_id: string, status: ApprovalStatus, }; + +export type Diff = { change: DiffChangeKind, oldPath: string | null, newPath: string | null, oldContent: string | null, newContent: string | null, +/** + * True when file contents are intentionally omitted (e.g., too large) + */ +contentOmitted: boolean, +/** + * Optional precomputed stats for omitted content + */ +additions: number | null, deletions: number | null, }; + +export type DiffChangeKind = "added" | "deleted" | "modified" | "renamed" | "copied" | "permissionChange"; + export type ApiResponse = { success: boolean, data: T | null, error_data: E | null, message: string | null, }; export type LoginStatus = { "status": "loggedout" } | { "status": "loggedin", profile: ProfileResponse, }; @@ -150,6 +164,14 @@ export type ListProjectsResponse = { projects: Array, }; export type RemoteProjectMembersResponse = { organization_id: string, members: Array, }; +export type CreateRemoteProjectRequest = { organization_id: string, name: string, }; + +export type LinkToExistingRequest = { remote_project_id: string, }; + +export type TagSearchParams = { search: string | null, }; + +export type TokenResponse = { access_token: string, expires_at: string | null, }; + export type UserSystemInfo = { config: Config, analytics_user_id: string, login_status: LoginStatus, environment: Environment, /** * Capabilities supported per executor (e.g., { "CLAUDE_CODE": ["SESSION_FORK"] }) @@ -170,7 +192,7 @@ export type CheckEditorAvailabilityResponse = { available: boolean, }; export type CheckAgentAvailabilityQuery = { executor: BaseCodingAgent, }; -export type AvailabilityInfo = { "type": "LOGIN_DETECTED", last_auth_timestamp: bigint, } | { "type": "INSTALLATION_FOUND" } | { "type": "NOT_FOUND" }; +export type CurrentUserResponse = { user_id: string, }; export type CreateFollowUpAttempt = { prompt: string, variant: string | null, retry_process_id: string | null, force_when_dirty: boolean | null, perform_git_reset: boolean | null, }; @@ -188,9 +210,7 @@ export type OpenEditorRequest = { editor_type: string | null, file_path: string export type OpenEditorResponse = { url: string | null, }; -export type AssignSharedTaskRequest = { new_assignee_user_id: string | null, version: bigint | null, }; - -export type AssignSharedTaskResponse = { 
shared_task: SharedTask, }; +export type AssignSharedTaskRequest = { new_assignee_user_id: string | null, }; export type ShareTaskResponse = { shared_task_id: string, }; @@ -202,6 +222,44 @@ export type ImageResponse = { id: string, file_path: string, original_name: stri export type ImageMetadata = { exists: boolean, file_name: string | null, path: string | null, size_bytes: bigint | null, format: string | null, proxy_url: string | null, }; +export type CreateTaskAttemptBody = { task_id: string, +/** + * Executor profile specification + */ +executor_profile_id: ExecutorProfileId, base_branch: string, }; + +export type RunAgentSetupRequest = { executor_profile_id: ExecutorProfileId, }; + +export type RunAgentSetupResponse = Record; + +export type GhCliSetupError = "BREW_MISSING" | "SETUP_HELPER_NOT_SUPPORTED" | { "OTHER": { message: string, } }; + +export type RebaseTaskAttemptRequest = { old_base_branch: string | null, new_base_branch: string | null, }; + +export type GitOperationError = { "type": "merge_conflicts", message: string, op: ConflictOp, } | { "type": "rebase_in_progress" }; + +export type PushError = { "type": "force_push_required" }; + +export type CreatePrError = { "type": "github_cli_not_installed" } | { "type": "github_cli_not_logged_in" } | { "type": "git_cli_not_logged_in" } | { "type": "git_cli_not_installed" } | { "type": "target_branch_not_found", branch: string, }; + +export type BranchStatus = { commits_behind: number | null, commits_ahead: number | null, has_uncommitted_changes: boolean | null, head_oid: string | null, uncommitted_count: number | null, untracked_count: number | null, target_branch_name: string, remote_commits_behind: number | null, remote_commits_ahead: number | null, merges: Array, +/** + * True if a `git rebase` is currently in progress in this worktree + */ +is_rebase_in_progress: boolean, +/** + * Current conflict operation if any + */ +conflict_op: ConflictOp | null, +/** + * List of files currently in conflicted (unmerged) state + */ +conflicted_files: Array, }; + +export type DirectoryEntry = { name: string, path: string, is_directory: boolean, is_git_repo: boolean, last_modified: bigint | null, }; + +export type DirectoryListResponse = { entries: Array, current_path: string, }; + export type Config = { config_version: string, theme: ThemeMode, executor_profile: ExecutorProfileId, disclaimer_acknowledged: boolean, onboarding_acknowledged: boolean, notifications: NotificationConfig, editor: EditorConfig, github: GitHubConfig, analytics_enabled: boolean, workspace_dir: string | null, last_app_version: string | null, show_release_notes: boolean, language: UiLanguage, git_branch_prefix: string, showcases: ShowcaseState, }; export type NotificationConfig = { sound_enabled: boolean, push_enabled: boolean, sound_file: SoundFile, }; @@ -224,17 +282,43 @@ export type ShowcaseState = { seen_features: Array, }; export type GitBranch = { name: string, is_current: boolean, is_remote: boolean, last_commit_date: Date, }; -export type Diff = { change: DiffChangeKind, oldPath: string | null, newPath: string | null, oldContent: string | null, newContent: string | null, -/** - * True when file contents are intentionally omitted (e.g., too large) - */ -contentOmitted: boolean, -/** - * Optional precomputed stats for omitted content - */ -additions: number | null, deletions: number | null, }; +export type SharedTaskDetails = { id: string, project_id: string, title: string, description: string | null, status: TaskStatus, }; -export type DiffChangeKind = "added" | 
"deleted" | "modified" | "renamed" | "copied" | "permissionChange"; +export type QueuedMessage = { +/** + * The task attempt this message is queued for + */ +task_attempt_id: string, +/** + * The follow-up data (message + variant) + */ +data: DraftFollowUpData, +/** + * Timestamp when the message was queued + */ +queued_at: string, }; + +export type QueueStatus = { "status": "empty" } | { "status": "queued", message: QueuedMessage, }; + +export type ConflictOp = "rebase" | "merge" | "cherry_pick" | "revert"; + +export type ExecutorAction = { typ: ExecutorActionType, next_action: ExecutorAction | null, }; + +export type McpConfig = { servers: { [key in string]?: JsonValue }, servers_path: Array, template: JsonValue, preconfigured: JsonValue, is_toml_config: boolean, }; + +export type ExecutorActionType = { "type": "CodingAgentInitialRequest" } & CodingAgentInitialRequest | { "type": "CodingAgentFollowUpRequest" } & CodingAgentFollowUpRequest | { "type": "ScriptRequest" } & ScriptRequest; + +export type ScriptContext = "SetupScript" | "CleanupScript" | "DevServer" | "ToolInstallScript"; + +export type ScriptRequest = { script: string, language: ScriptRequestLanguage, context: ScriptContext, }; + +export type ScriptRequestLanguage = "Bash"; + +export enum BaseCodingAgent { CLAUDE_CODE = "CLAUDE_CODE", AMP = "AMP", GEMINI = "GEMINI", CODEX = "CODEX", OPENCODE = "OPENCODE", CURSOR_AGENT = "CURSOR_AGENT", QWEN_CODE = "QWEN_CODE", COPILOT = "COPILOT", DROID = "DROID" } + +export type CodingAgent = { "CLAUDE_CODE": ClaudeCode } | { "AMP": Amp } | { "GEMINI": Gemini } | { "CODEX": Codex } | { "OPENCODE": Opencode } | { "CURSOR_AGENT": CursorAgent } | { "QWEN_CODE": QwenCode } | { "COPILOT": Copilot } | { "DROID": Droid }; + +export type AvailabilityInfo = { "type": "LOGIN_DETECTED", last_auth_timestamp: bigint, } | { "type": "INSTALLATION_FOUND" } | { "type": "NOT_FOUND" }; export type CommandBuilder = { /** @@ -308,74 +392,6 @@ export type CodingAgentFollowUpRequest = { prompt: string, session_id: string, */ executor_profile_id: ExecutorProfileId, }; -export type CreateTaskAttemptBody = { task_id: string, -/** - * Executor profile specification - */ -executor_profile_id: ExecutorProfileId, base_branch: string, }; - -export type RunAgentSetupRequest = { executor_profile_id: ExecutorProfileId, }; - -export type RunAgentSetupResponse = Record; - -export type GhCliSetupError = "BREW_MISSING" | "SETUP_HELPER_NOT_SUPPORTED" | { "OTHER": { message: string, } }; - -export type RebaseTaskAttemptRequest = { old_base_branch: string | null, new_base_branch: string | null, }; - -export type GitOperationError = { "type": "merge_conflicts", message: string, op: ConflictOp, } | { "type": "rebase_in_progress" }; - -export type PushError = { "type": "force_push_required" }; - -export type CreatePrError = { "type": "github_cli_not_installed" } | { "type": "github_cli_not_logged_in" } | { "type": "git_cli_not_logged_in" } | { "type": "git_cli_not_installed" } | { "type": "target_branch_not_found", branch: string, }; - -export type BranchStatus = { commits_behind: number | null, commits_ahead: number | null, has_uncommitted_changes: boolean | null, head_oid: string | null, uncommitted_count: number | null, untracked_count: number | null, target_branch_name: string, remote_commits_behind: number | null, remote_commits_ahead: number | null, merges: Array, -/** - * True if a `git rebase` is currently in progress in this worktree - */ -is_rebase_in_progress: boolean, -/** - * Current conflict operation if any - */ 
-conflict_op: ConflictOp | null, -/** - * List of files currently in conflicted (unmerged) state - */ -conflicted_files: Array, }; - -export type ConflictOp = "rebase" | "merge" | "cherry_pick" | "revert"; - -export type TaskAttempt = { id: string, task_id: string, container_ref: string | null, branch: string, target_branch: string, executor: string, worktree_deleted: boolean, setup_completed_at: string | null, created_at: string, updated_at: string, }; - -export type ExecutionProcess = { id: string, task_attempt_id: string, run_reason: ExecutionProcessRunReason, executor_action: ExecutorAction, -/** - * Git HEAD commit OID captured before the process starts - */ -before_head_commit: string | null, -/** - * Git HEAD commit OID captured after the process ends - */ -after_head_commit: string | null, status: ExecutionProcessStatus, exit_code: bigint | null, -/** - * dropped: true if this process is excluded from the current - * history view (due to restore/trimming). Hidden from logs/timeline; - * still listed in the Processes tab. - */ -dropped: boolean, started_at: string, completed_at: string | null, created_at: string, updated_at: string, }; - -export enum ExecutionProcessStatus { running = "running", completed = "completed", failed = "failed", killed = "killed" } - -export type ExecutionProcessRunReason = "setupscript" | "cleanupscript" | "codingagent" | "devserver"; - -export type Merge = { "type": "direct" } & DirectMerge | { "type": "pr" } & PrMerge; - -export type DirectMerge = { id: string, task_attempt_id: string, merge_commit: string, target_branch_name: string, created_at: string, }; - -export type PrMerge = { id: string, task_attempt_id: string, created_at: string, target_branch_name: string, pr_info: PullRequestInfo, }; - -export type MergeStatus = "open" | "merged" | "closed" | "unknown"; - -export type PullRequestInfo = { number: bigint, url: string, status: MergeStatus, merged_at: string | null, merge_commit_sha: string | null, }; - export type CommandExitStatus = { "type": "exit_code", code: number, } | { "type": "success", success: boolean, }; export type CommandRunResult = { exit_status: CommandExitStatus | null, output: string | null, }; @@ -412,10 +428,4 @@ export type ToolStatus = { "status": "created" } | { "status": "success" } | { " export type PatchType = { "type": "NORMALIZED_ENTRY", "content": NormalizedEntry } | { "type": "STDOUT", "content": string } | { "type": "STDERR", "content": string } | { "type": "DIFF", "content": Diff }; -export type ApprovalStatus = { "status": "pending" } | { "status": "approved" } | { "status": "denied", reason?: string, } | { "status": "timed_out" }; - -export type CreateApprovalRequest = { tool_name: string, tool_input: JsonValue, tool_call_id: string, }; - -export type ApprovalResponse = { execution_process_id: string, status: ApprovalStatus, }; - export type JsonValue = number | string | boolean | Array | { [key in string]?: JsonValue } | null; \ No newline at end of file
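Taken together, these changes wire the adoption path end to end: a shared task row arrives through the Electric collection, useAutoLinkSharedTasks (or the user) posts its SharedTaskDetails to the local backend, and the task caches are invalidated via taskKeys. A minimal standalone sketch of that flow; the wrapper function name is hypothetical:

import { tasksApi } from '@/lib/api';
import type { SharedTaskDetails, Task } from 'shared/types';

// POST the shared task's details to /api/shared-tasks/link-to-local.
// Per the mutation typing in useTaskMutations, the backend answers with
// the newly created local Task, or null when no task was created.
export async function adoptSharedTask(shared: SharedTaskDetails): Promise<Task | null> {
  const created = await tasksApi.linkToLocal(shared);
  if (created) {
    console.log('Linked shared task', shared.id, 'to local task', created.id);
  }
  return created;
}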