From fd9e5e5d79b04097054bbeaab5ba36dd315f8df9 Mon Sep 17 00:00:00 2001 From: Louis Knight-Webb Date: Mon, 15 Dec 2025 19:42:13 +0000 Subject: [PATCH] Remote review (#1521) --- .github/workflows/pre-release.yml | 40 + .gitignore | 1 + Cargo.lock | 806 ++++++++++++++++- Cargo.toml | 3 +- Dockerfile | 6 + ...1870571c4c121c32e0c3393f6770fc3608e95.json | 52 ++ ...16780da9db40e4d892a75d7d244f247db5c04.json | 14 + ...aace6830d78daacfcbd7af69e4f76a234d01c.json | 76 ++ ...1a50ee50b8523c92bfe3ccc82b09518608204.json | 119 +++ ...c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json | 23 + ...b8088f60bf3390bfaddbae993e87df89b8844.json | 76 ++ ...1ba969bacf776c0be43b608b5b0ca3f68c1fe.json | 12 + ...ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json | 14 + ...51e724d7054f8c62106795bccf9fd5366696d.json | 16 + ...07b458a668304ed6262c5afab4b85a227d119.json | 14 + ...58aff9b042724bda846aadd9bf1b1a3a53791.json | 14 + ...00739e0ee3cd00d06a692beac0f0fb2324ac8.json | 118 +++ ...c9833cfa4fbc45c3bed269d25a8ada30634e4.json | 14 + ...f276910980a22c0eeb17a8f4028d07d36515b.json | 46 + ...8b1d7af1be7d48c1c943bc93e80f30da6f6d6.json | 14 + ...83a64d281a931aaacd1ed6e4b73f18f1b6a2f.json | 14 + ...8228fb438985f6b78f9d83663ecb11d59675f.json | 14 + ...9bb594e83fc21664f062f29ed148969b17c0b.json | 112 +++ ...0f84474986985ec0d204ab17becd6d3568d0a.json | 14 + ...7f2b9c21ab6552986181acc10a1523852655c.json | 55 ++ ...443de1967c2d024be80ae1d9878e27b474986.json | 15 + ...b7ea50b0d6865f0708b6113bea68a54d857f4.json | 15 + ...d42029b58919a0ac0e8900320fee60c5c93b2.json | 76 ++ ...287fcfb26b609107d753e372faeb7f9d92302.json | 81 ++ crates/remote/Cargo.toml | 13 +- crates/remote/Dockerfile | 2 +- crates/remote/docker-compose.yml | 11 +- .../20251212000000_create_reviews_table.sql | 17 + ...0251215000000_github_app_installations.sql | 40 + ...16000000_add_webhook_fields_to_reviews.sql | 15 + crates/remote/src/app.rs | 43 +- crates/remote/src/config.rs | 108 +++ crates/remote/src/db/github_app.rs | 490 +++++++++++ 
crates/remote/src/db/mod.rs | 2 + crates/remote/src/db/reviews.rs | 252 ++++++ crates/remote/src/github_app/jwt.rs | 97 +++ crates/remote/src/github_app/mod.rs | 9 + crates/remote/src/github_app/pr_review.rs | 243 ++++++ crates/remote/src/github_app/service.rs | 387 +++++++++ crates/remote/src/github_app/webhook.rs | 80 ++ crates/remote/src/lib.rs | 2 + crates/remote/src/mail.rs | 88 ++ crates/remote/src/r2.rs | 134 +++ crates/remote/src/routes/github_app.rs | 821 ++++++++++++++++++ crates/remote/src/routes/mod.rs | 7 +- crates/remote/src/routes/review.rs | 475 ++++++++++ crates/remote/src/state.rs | 16 + crates/review/Cargo.toml | 29 + crates/review/src/api.rs | 208 +++++ crates/review/src/archive.rs | 106 +++ crates/review/src/claude_session.rs | 513 +++++++++++ crates/review/src/config.rs | 47 + crates/review/src/error.rs | 43 + crates/review/src/github.rs | 229 +++++ crates/review/src/main.rs | 255 ++++++ crates/review/src/session_selector.rs | 173 ++++ local-build.sh | 7 + npx-cli/bin/cli.js | 13 + pnpm-lock.yaml | 21 + .../{.env.example => .env.production.example} | 4 + remote-frontend/index.html | 2 + remote-frontend/package.json | 14 +- remote-frontend/public/favicon.png | Bin 0 -> 13074 bytes remote-frontend/public/logo_light.png | Bin 0 -> 15922 bytes .../public/review_fast_logo_dark.svg | 3 + remote-frontend/public/robots.txt | 2 + remote-frontend/src/App.tsx | 5 +- remote-frontend/src/AppRouter.tsx | 42 +- remote-frontend/src/api.ts | 451 ++++++++++ remote-frontend/src/auth.ts | 26 + .../src/components/CodeFragmentCard.tsx | 299 +++++++ .../src/components/MarkdownRenderer.tsx | 84 ++ remote-frontend/src/index.css | 97 +++ remote-frontend/src/lib/diff-parser.ts | 281 ++++++ remote-frontend/src/lib/extToLanguage.ts | 72 ++ remote-frontend/src/lib/utils.ts | 42 + remote-frontend/src/main.tsx | 24 +- .../src/pages/AccountCompletePage.tsx | 149 ++++ remote-frontend/src/pages/AccountPage.tsx | 357 ++++++++ remote-frontend/src/pages/HomePage.tsx | 2 +- 
.../src/pages/InvitationCompletePage.tsx | 122 ++- remote-frontend/src/pages/InvitationPage.tsx | 71 +- remote-frontend/src/pages/NotFoundPage.tsx | 2 +- .../src/pages/OrganizationPage.tsx | 782 +++++++++++++++++ remote-frontend/src/pages/ReviewPage.tsx | 614 +++++++++++++ remote-frontend/src/pkce.ts | 43 +- remote-frontend/src/styles/diff-overrides.css | 170 ++++ remote-frontend/src/types/review.ts | 17 + remote-frontend/src/vite-env.d.ts | 8 +- remote-frontend/tailwind.config.js | 46 +- 95 files changed, 10506 insertions(+), 195 deletions(-) create mode 100644 crates/remote/.sqlx/query-00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95.json create mode 100644 crates/remote/.sqlx/query-18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04.json create mode 100644 crates/remote/.sqlx/query-40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c.json create mode 100644 crates/remote/.sqlx/query-4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204.json create mode 100644 crates/remote/.sqlx/query-4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json create mode 100644 crates/remote/.sqlx/query-471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844.json create mode 100644 crates/remote/.sqlx/query-55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe.json create mode 100644 crates/remote/.sqlx/query-574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json create mode 100644 crates/remote/.sqlx/query-5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d.json create mode 100644 crates/remote/.sqlx/query-6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119.json create mode 100644 crates/remote/.sqlx/query-79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791.json create mode 100644 crates/remote/.sqlx/query-8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8.json create mode 100644 
crates/remote/.sqlx/query-9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4.json create mode 100644 crates/remote/.sqlx/query-9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b.json create mode 100644 crates/remote/.sqlx/query-a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6.json create mode 100644 crates/remote/.sqlx/query-bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f.json create mode 100644 crates/remote/.sqlx/query-c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f.json create mode 100644 crates/remote/.sqlx/query-da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b.json create mode 100644 crates/remote/.sqlx/query-df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a.json create mode 100644 crates/remote/.sqlx/query-e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c.json create mode 100644 crates/remote/.sqlx/query-ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986.json create mode 100644 crates/remote/.sqlx/query-f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4.json create mode 100644 crates/remote/.sqlx/query-f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2.json create mode 100644 crates/remote/.sqlx/query-fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302.json create mode 100644 crates/remote/migrations/20251212000000_create_reviews_table.sql create mode 100644 crates/remote/migrations/20251215000000_github_app_installations.sql create mode 100644 crates/remote/migrations/20251216000000_add_webhook_fields_to_reviews.sql create mode 100644 crates/remote/src/db/github_app.rs create mode 100644 crates/remote/src/db/reviews.rs create mode 100644 crates/remote/src/github_app/jwt.rs create mode 100644 crates/remote/src/github_app/mod.rs create mode 100644 crates/remote/src/github_app/pr_review.rs create mode 100644 crates/remote/src/github_app/service.rs create mode 100644 
crates/remote/src/github_app/webhook.rs create mode 100644 crates/remote/src/r2.rs create mode 100644 crates/remote/src/routes/github_app.rs create mode 100644 crates/remote/src/routes/review.rs create mode 100644 crates/review/Cargo.toml create mode 100644 crates/review/src/api.rs create mode 100644 crates/review/src/archive.rs create mode 100644 crates/review/src/claude_session.rs create mode 100644 crates/review/src/config.rs create mode 100644 crates/review/src/error.rs create mode 100644 crates/review/src/github.rs create mode 100644 crates/review/src/main.rs create mode 100644 crates/review/src/session_selector.rs rename remote-frontend/{.env.example => .env.production.example} (64%) create mode 100644 remote-frontend/public/favicon.png create mode 100644 remote-frontend/public/logo_light.png create mode 100644 remote-frontend/public/review_fast_logo_dark.svg create mode 100644 remote-frontend/public/robots.txt create mode 100644 remote-frontend/src/auth.ts create mode 100644 remote-frontend/src/components/CodeFragmentCard.tsx create mode 100644 remote-frontend/src/components/MarkdownRenderer.tsx create mode 100644 remote-frontend/src/lib/diff-parser.ts create mode 100644 remote-frontend/src/lib/extToLanguage.ts create mode 100644 remote-frontend/src/lib/utils.ts create mode 100644 remote-frontend/src/pages/AccountCompletePage.tsx create mode 100644 remote-frontend/src/pages/AccountPage.tsx create mode 100644 remote-frontend/src/pages/OrganizationPage.tsx create mode 100644 remote-frontend/src/pages/ReviewPage.tsx create mode 100644 remote-frontend/src/styles/diff-overrides.css create mode 100644 remote-frontend/src/types/review.ts diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index bc8aa2e0..5d442afb 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -234,6 +234,7 @@ jobs: run: | cargo zigbuild --release --target ${{ matrix.target }} -p server cargo zigbuild --release --target ${{ 
matrix.target }} --bin mcp_task_server + cargo zigbuild --release --target ${{ matrix.target }} -p review env: POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }} POSTHOG_API_ENDPOINT: ${{ secrets.POSTHOG_API_ENDPOINT }} @@ -244,6 +245,7 @@ jobs: run: | cargo build --release --target ${{ matrix.target }} -p server cargo build --release --target ${{ matrix.target }} --bin mcp_task_server + cargo build --release --target ${{ matrix.target }} -p review env: POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }} POSTHOG_API_ENDPOINT: ${{ secrets.POSTHOG_API_ENDPOINT }} @@ -268,9 +270,11 @@ jobs: if [[ "${{ matrix.os }}" == "windows-latest-l" ]]; then cp target/${{ matrix.target }}/release/server.exe dist/vibe-kanban-${{ matrix.name }}.exe cp target/${{ matrix.target }}/release/mcp_task_server.exe dist/vibe-kanban-mcp-${{ matrix.name }}.exe + cp target/${{ matrix.target }}/release/review.exe dist/vibe-kanban-review-${{ matrix.name }}.exe else cp target/${{ matrix.target }}/release/server dist/vibe-kanban-${{ matrix.name }} cp target/${{ matrix.target }}/release/mcp_task_server dist/vibe-kanban-mcp-${{ matrix.name }} + cp target/${{ matrix.target }}/release/review dist/vibe-kanban-review-${{ matrix.name }} fi # Code signing for macOS only @@ -335,12 +339,38 @@ jobs: notarize: true app_store_connect_api_key_json_file: app_store_key.json + - name: Sign Review binary (macOS) + if: runner.os == 'macOS' + uses: indygreg/apple-code-sign-action@v1 + with: + input_path: target/${{ matrix.target }}/release/review + output_path: vibe-kanban-review + p12_file: certificate.p12 + p12_password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} + sign: true + sign_args: "--code-signature-flags=runtime" + + - name: Package Review binary (macOS) + if: runner.os == 'macOS' + run: zip vibe-kanban-review.zip vibe-kanban-review + + - name: Notarize signed Review binary (macOS) + if: runner.os == 'macOS' + uses: indygreg/apple-code-sign-action@v1 + continue-on-error: true + with: + input_path: 
vibe-kanban-review.zip + sign: false + notarize: true + app_store_connect_api_key_json_file: app_store_key.json + - name: Prepare signed binaries (macOS) if: runner.os == 'macOS' run: | mkdir -p dist cp vibe-kanban.zip dist/vibe-kanban-${{ matrix.name }}.zip cp vibe-kanban-mcp.zip dist/vibe-kanban-mcp-${{ matrix.name }}.zip + cp vibe-kanban-review.zip dist/vibe-kanban-review-${{ matrix.name }}.zip - name: Clean up certificates (macOS) if: runner.os == 'macOS' @@ -367,26 +397,32 @@ jobs: name: linux-x64 binary: vibe-kanban mcp_binary: vibe-kanban-mcp + review_binary: vibe-kanban-review - target: x86_64-pc-windows-msvc name: windows-x64 binary: vibe-kanban.exe mcp_binary: vibe-kanban-mcp.exe + review_binary: vibe-kanban-review.exe - target: x86_64-apple-darwin name: macos-x64 binary: vibe-kanban mcp_binary: vibe-kanban-mcp + review_binary: vibe-kanban-review - target: aarch64-apple-darwin name: macos-arm64 binary: vibe-kanban mcp_binary: vibe-kanban-mcp + review_binary: vibe-kanban-review - target: aarch64-pc-windows-msvc name: windows-arm64 binary: vibe-kanban.exe mcp_binary: vibe-kanban-mcp.exe + review_binary: vibe-kanban-review.exe - target: aarch64-unknown-linux-musl name: linux-arm64 binary: vibe-kanban mcp_binary: vibe-kanban-mcp + review_binary: vibe-kanban-review steps: - uses: actions/checkout@v4 with: @@ -415,12 +451,15 @@ jobs: mkdir -p npx-cli/dist/${{ matrix.name }} mkdir vibe-kanban-${{ matrix.name }} mkdir vibe-kanban-mcp-${{ matrix.name }} + mkdir vibe-kanban-review-${{ matrix.name }} cp dist/vibe-kanban-${{ matrix.name }}* vibe-kanban-${{ matrix.name }}/${{ matrix.binary }} cp dist/vibe-kanban-mcp-${{ matrix.name }}* vibe-kanban-mcp-${{ matrix.name }}/${{ matrix.mcp_binary }} + cp dist/vibe-kanban-review-${{ matrix.name }}* vibe-kanban-review-${{ matrix.name }}/${{ matrix.review_binary }} zip -j npx-cli/dist/${{ matrix.name }}/vibe-kanban.zip vibe-kanban-${{ matrix.name }}/${{ matrix.binary }} zip -j npx-cli/dist/${{ matrix.name 
}}/vibe-kanban-mcp.zip vibe-kanban-mcp-${{ matrix.name }}/${{ matrix.mcp_binary }} + zip -j npx-cli/dist/${{ matrix.name }}/vibe-kanban-review.zip vibe-kanban-review-${{ matrix.name }}/${{ matrix.review_binary }} - name: Create platform package (macOS) if: matrix.name == 'macos-arm64' || matrix.name == 'macos-x64' @@ -429,6 +468,7 @@ jobs: mkdir vibe-kanban-${{ matrix.name }} cp dist/vibe-kanban-${{ matrix.name }}* npx-cli/dist/${{ matrix.name }}/vibe-kanban.zip cp dist/vibe-kanban-mcp-${{ matrix.name }}* npx-cli/dist/${{ matrix.name }}/vibe-kanban-mcp.zip + cp dist/vibe-kanban-review-${{ matrix.name }}* npx-cli/dist/${{ matrix.name }}/vibe-kanban-review.zip - name: Upload platform package artifact uses: actions/upload-artifact@v4 diff --git a/.gitignore b/.gitignore index bb921eaf..d3484afe 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ yarn-error.log* .env.development.local .env.test.local .env.production.local +.env.production # IDE .vscode/ diff --git a/Cargo.lock b/Cargo.lock index a50e4d70..a996eeb7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -466,6 +466,379 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "aws-config" +version = "1.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96571e6996817bf3d58f6b569e4b9fd2e9d2fcf9f7424eed07b2ce9bb87535e5" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 1.3.1", + "time", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "aws-credential-types" +version = "1.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cd362783681b15d136480ad555a099e82ecd8e2d10a841e14dfd0078d67fee3" +dependencies 
= [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-lc-rs" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b5ce75405893cd713f9ab8e297d8e438f624dde7d706108285f7e17a25a180f" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "179c3777a8b5e70e90ea426114ffc565b2c1a9f82f6c4a0c5a34aa6ef5e781b6" +dependencies = [ + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "aws-runtime" +version = "1.5.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d81b5b2898f6798ad58f484856768bca817e3cd9de0974c24ae0f1113fe88f1b" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.117.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c134e2d1ad1ad23a8cf88ceccf39d515914f385e670ffc12226013bd16dfe825" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "fastrand", + "hex", + "hmac", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "lru", + "percent-encoding", + "regex-lite", + "sha2", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.95.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"55542378e419558e6b1f398ca70adb0b2088077e79ad9f14eb09441f2f7b2164" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "fastrand", + "http 0.2.12", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e523e1c4e8e7e8ff219d732988e22bfeae8a1cafdbe6d9eca1546fa080be7c" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.3.1", + "percent-encoding", + "sha2", + "time", + "tracing", +] + +[[package]] +name = "aws-smithy-async" +version = "1.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ee19095c7c4dda59f1697d028ce704c24b2d33c6718790c7f1d5a3015b4107c" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.63.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87294a084b43d649d967efe58aa1f9e0adc260e13a6938eb904c0ae9b45824ae" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "crc-fast", + "hex", + "http 0.2.12", + "http-body 0.4.6", + "md-5", + "pin-project-lite", + "sha1", + "sha2", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc12f8b310e38cad85cf3bef45ad236f470717393c613266ce0a89512286b650" +dependencies = [ + "aws-smithy-types", + "bytes", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version = "0.62.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"826141069295752372f8203c17f28e30c464d22899a43a0c9fd9c458d469c88b" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "futures-util", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-http-client" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59e62db736db19c488966c8d787f52e6270be565727236fd5579eaa301e7bc4a" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "h2 0.3.27", + "h2 0.4.12", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper 1.7.0", + "hyper-rustls 0.24.2", + "hyper-rustls 0.27.7", + "hyper-util", + "pin-project-lite", + "rustls 0.21.12", + "rustls 0.23.34", + "rustls-native-certs", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.4", + "tower", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.61.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6864c190cbb8e30cf4b77b2c8f3b6dfffa697a09b7218d2f7cd3d4c4065a9f7" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-observability" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f616c3f2260612fe44cede278bafa18e73e6479c4e393e2c4518cf2a9a228a" +dependencies = [ + "aws-smithy-runtime-api", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae5d689cf437eae90460e944a58b5668530d433b4ff85789e69d2f2a556e057d" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a392db6c583ea4a912538afb86b7be7c5d8887d91604f50eb55c262ee1b4a5f5" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-http-client", + "aws-smithy-observability", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "http-body 1.0.1", + "pin-project-lite", + "pin-utils", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab0d43d899f9e508300e587bf582ba54c27a452dd0a9ea294690669138ae14a2" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.3.1", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "905cb13a9895626d49cf2ced759b062d913834c7482c38e49557eac4e6193f01" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11b2f670422ff42bf7065031e72b45bc52a3508bd089f743ea90731ca2b6ea57" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d980627d2dd7bfc32a3c025685a033eeab8d365cc840c631ef59d1b8f428164" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "rustc_version", + "tracing", +] + [[package]] name = "axum" version = "0.8.6" @@ -479,9 +852,9 @@ dependencies = [ "form_urlencoded", "futures-util", "http 1.3.1", - "http-body", + "http-body 
1.0.1", "http-body-util", - "hyper", + "hyper 1.7.0", "hyper-util", "itoa", "matchit", @@ -513,7 +886,7 @@ dependencies = [ "bytes", "futures-core", "http 1.3.1", - "http-body", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -535,7 +908,7 @@ dependencies = [ "futures-util", "headers", "http 1.3.1", - "http-body", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -595,6 +968,16 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + [[package]] name = "base64ct" version = "1.8.0" @@ -732,6 +1115,16 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + [[package]] name = "bytestring" version = "1.5.0" @@ -743,9 +1136,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.44" +version = "1.2.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37521ac7aabe3d13122dc382493e20c9416f299d2ccd5b3a5340a2570cdeb0f3" +checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215" dependencies = [ "find-msvc-tools", "jobserver", @@ -849,6 +1242,15 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" +[[package]] +name = "cmake" +version = "0.1.55" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d49d74c227b6cc9f3c51a2c7c667a05b6453f7f0f952a5f8e4493bb9e731d68e" +dependencies = [ + "cc", +] + [[package]] name = "codex-app-server-protocol" version = "0.71.0" @@ -955,6 +1357,19 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "console" +version = "0.15.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width", + "windows-sys 0.59.0", +] + [[package]] name = "const-oid" version = "0.9.6" @@ -980,6 +1395,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -1010,6 +1435,19 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" +[[package]] +name = "crc-fast" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ddc2d09feefeee8bd78101665bd8645637828fa9317f9f292496dbbd8c65ff3" +dependencies = [ + "crc", + "digest", + "rand 0.9.2", + "regex", + "rustversion", +] + [[package]] name = "crc32fast" version = "1.5.0" @@ -1278,6 +1716,19 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "dialoguer" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" +dependencies = [ + "console", + "shell-words", + "tempfile", + "thiserror 1.0.69", + "zeroize", +] + [[package]] name = "digest" version = "0.10.7" @@ -1454,6 +1905,12 @@ dependencies = [ "zeroize", ] 
+[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "encoding_rs" version = "0.8.35" @@ -1644,10 +2101,22 @@ dependencies = [ ] [[package]] -name = "find-msvc-tools" -version = "0.1.4" +name = "filetime" +version = "0.2.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" +checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.60.2", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "findshlibs" @@ -1748,6 +2217,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "fsevent-sys" version = "4.1.0" @@ -1998,6 +2473,25 @@ dependencies = [ "subtle", ] +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.12.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "h2" version = "0.4.12" @@ -2157,6 +2651,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + 
"http 0.2.12", + "pin-project-lite", +] + [[package]] name = "http-body" version = "1.0.1" @@ -2176,7 +2681,7 @@ dependencies = [ "bytes", "futures-core", "http 1.3.1", - "http-body", + "http-body 1.0.1", "pin-project-lite", ] @@ -2198,6 +2703,30 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + [[package]] name = "hyper" version = "1.7.0" @@ -2208,9 +2737,9 @@ dependencies = [ "bytes", "futures-channel", "futures-core", - "h2", + "h2 0.4.12", "http 1.3.1", - "http-body", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -2221,6 +2750,21 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "log", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.7" @@ -2228,12 +2772,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ "http 1.3.1", - "hyper", + "hyper 1.7.0", "hyper-util", - "rustls", + "rustls 0.23.34", + "rustls-native-certs", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.4", "tower-service", "webpki-roots 1.0.4", ] @@ -2246,7 +2791,7 @@ 
checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper", + "hyper 1.7.0", "hyper-util", "native-tls", "tokio", @@ -2266,8 +2811,8 @@ dependencies = [ "futures-core", "futures-util", "http 1.3.1", - "http-body", - "hyper", + "http-body 1.0.1", + "hyper 1.7.0", "ipnet", "libc", "percent-encoding", @@ -2517,6 +3062,19 @@ dependencies = [ "serde_core", ] +[[package]] +name = "indicatif" +version = "0.17.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" +dependencies = [ + "console", + "number_prefix", + "portable-atomic", + "unicode-width", + "web-time", +] + [[package]] name = "inotify" version = "0.11.0" @@ -2552,6 +3110,15 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "ipnetwork" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf466541e9d546596ee94f9f69590f89473455f88372423e0008fc1a7daf100e" +dependencies = [ + "serde", +] + [[package]] name = "iri-string" version = "0.7.8" @@ -3065,7 +3632,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -3238,6 +3805,12 @@ dependencies = [ "libm", ] +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + [[package]] name = "objc2" version = "0.6.3" @@ -3407,6 +3980,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "outref" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" + [[package]] 
name = "p256" version = "0.13.2" @@ -3727,7 +4306,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls", + "rustls 0.23.34", "socket2 0.6.1", "thiserror 2.0.17", "tokio", @@ -3747,7 +4326,7 @@ dependencies = [ "rand 0.9.2", "ring", "rustc-hash 2.1.1", - "rustls", + "rustls 0.23.34", "rustls-pki-types", "slab", "thiserror 2.0.17", @@ -3937,11 +4516,18 @@ dependencies = [ "aes-gcm", "anyhow", "async-trait", + "aws-config", + "aws-credential-types", + "aws-sdk-s3", "axum", "axum-extra", "base64", "chrono", + "flate2", "futures", + "hex", + "hmac", + "ipnetwork", "jsonwebtoken 9.3.1", "rand 0.9.2", "reqwest", @@ -3952,6 +4538,9 @@ dependencies = [ "serde_json", "sha2", "sqlx", + "subtle", + "tar", + "tempfile", "thiserror 2.0.17", "tokio", "tokio-stream", @@ -3961,6 +4550,7 @@ dependencies = [ "tracing-subscriber", "ts-rs 11.0.1", "url", + "urlencoding", "utils", "uuid", ] @@ -3977,12 +4567,12 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.4.12", "http 1.3.1", - "http-body", + "http-body 1.0.1", "http-body-util", - "hyper", - "hyper-rustls", + "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-tls", "hyper-util", "js-sys", @@ -3992,7 +4582,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls", + "rustls 0.23.34", "rustls-pki-types", "serde", "serde_json", @@ -4000,7 +4590,7 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-native-tls", - "tokio-rustls", + "tokio-rustls 0.26.4", "tokio-util", "tower", "tower-http 0.6.6", @@ -4013,6 +4603,30 @@ dependencies = [ "webpki-roots 1.0.4", ] +[[package]] +name = "review" +version = "0.0.134" +dependencies = [ + "anyhow", + "chrono", + "clap", + "dialoguer", + "dirs 5.0.1", + "flate2", + "indicatif", + "reqwest", + "serde", + "serde_json", + "tar", + "tempfile", + "thiserror 2.0.17", + "tokio", + "toml", + "tracing", + "tracing-subscriber", + "uuid", +] + [[package]] name = "rfc6979" version = "0.4.0" @@ -4165,20 +4779,45 @@ dependencies 
= [ "windows-sys 0.61.2", ] +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + [[package]] name = "rustls" version = "0.23.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a9586e9ee2b4f8fab52a0048ca7334d7024eef48e2cb9407e3497bb7cab7fa7" dependencies = [ + "aws-lc-rs", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.103.8", "subtle", "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.5.1", +] + [[package]] name = "rustls-pemfile" version = "2.2.0" @@ -4198,12 +4837,23 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -4308,6 +4958,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", 
+] + [[package]] name = "sec1" version = "0.7.3" @@ -4338,7 +4998,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.10.0", - "core-foundation", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.10.1", "core-foundation-sys", "libc", "security-framework-sys", @@ -4701,7 +5374,7 @@ dependencies = [ "reqwest", "rust-embed", "secrecy", - "security-framework", + "security-framework 2.11.1", "serde", "serde_json", "sha2", @@ -4751,6 +5424,12 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "shell-words" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc6fe69c597f9c37bfeeeeeb33da3530379845f10be461a66d16d03eca2ded77" + [[package]] name = "shellexpand" version = "3.1.1" @@ -4896,11 +5575,12 @@ dependencies = [ "hashbrown 0.15.5", "hashlink", "indexmap 2.12.0", + "ipnetwork", "log", "memchr", "once_cell", "percent-encoding", - "rustls", + "rustls 0.23.34", "serde", "serde_json", "sha2", @@ -5017,6 +5697,7 @@ dependencies = [ "hkdf", "hmac", "home", + "ipnetwork", "itoa", "log", "md-5", @@ -5181,7 +5862,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.10.0", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys", ] @@ -5201,6 +5882,17 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" +[[package]] +name = "tar" +version = "0.4.44" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "tauri-winrt-notification" version = "0.7.2" @@ -5379,13 +6071,23 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ - "rustls", + "rustls 0.23.34", "tokio", ] @@ -5524,7 +6226,7 @@ dependencies = [ "bytes", "futures-util", "http 1.3.1", - "http-body", + "http-body 1.0.1", "http-body-util", "http-range-header", "httpdate", @@ -5550,7 +6252,7 @@ dependencies = [ "bytes", "futures-util", "http 1.3.1", - "http-body", + "http-body 1.0.1", "iri-string", "pin-project-lite", "tower", @@ -5790,6 +6492,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "unicode-width" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -5854,6 +6562,12 @@ dependencies = [ "serde", ] +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + [[package]] name = "utf-8" version = "0.7.6" @@ -5942,6 +6656,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + [[package]] name = "vte" version = "0.14.1" @@ -6603,12 +7323,28 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix", +] + [[package]] name = "xdg" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb433233f2df9344722454bc7e96465c9d03bff9d77c248f9e7523fe79585b5" +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + [[package]] name = "yoke" version = "0.8.1" diff --git a/Cargo.toml b/Cargo.toml index eddba95a..27321f4d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,8 @@ members = [ "crates/utils", "crates/local-deployment", "crates/deployment", - "crates/remote" + "crates/remote", + "crates/review" ] [workspace.dependencies] diff --git a/Dockerfile b/Dockerfile index 70b51bfe..dc0e9db3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,6 +16,12 @@ ENV RUSTFLAGS="-C target-feature=-crt-static" RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y ENV PATH="/root/.cargo/bin:${PATH}" +ARG POSTHOG_API_KEY +ARG POSTHOG_API_ENDPOINT + +ENV VITE_PUBLIC_POSTHOG_KEY=$POSTHOG_API_KEY +ENV VITE_PUBLIC_POSTHOG_HOST=$POSTHOG_API_ENDPOINT + # Set working directory WORKDIR /app diff --git 
a/crates/remote/.sqlx/query-00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95.json b/crates/remote/.sqlx/query-00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95.json new file mode 100644 index 00000000..aa0d47a3 --- /dev/null +++ b/crates/remote/.sqlx/query-00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n organization_id,\n user_id,\n state_token,\n expires_at,\n created_at\n FROM github_app_pending_installations\n WHERE state_token = $1 AND expires_at > NOW()\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "organization_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "state_token", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95" +} diff --git a/crates/remote/.sqlx/query-18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04.json b/crates/remote/.sqlx/query-18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04.json new file mode 100644 index 00000000..08fb834c --- /dev/null +++ b/crates/remote/.sqlx/query-18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE reviews\n SET status = 'failed'\n WHERE id = $1 AND deleted_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04" +} diff --git 
a/crates/remote/.sqlx/query-40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c.json b/crates/remote/.sqlx/query-40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c.json new file mode 100644 index 00000000..acd2984e --- /dev/null +++ b/crates/remote/.sqlx/query-40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n FROM github_app_installations\n WHERE github_account_login = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "organization_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "github_account_login", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "github_account_type", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "repository_selection", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "installed_by_user_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "suspended_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c" +} diff --git a/crates/remote/.sqlx/query-4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204.json b/crates/remote/.sqlx/query-4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204.json new file mode 100644 index 00000000..48ec5a7d --- 
/dev/null +++ b/crates/remote/.sqlx/query-4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204.json @@ -0,0 +1,119 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO reviews (id, gh_pr_url, r2_path, pr_title, github_installation_id, pr_owner, pr_repo, pr_number)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n RETURNING\n id,\n gh_pr_url,\n claude_code_session_id,\n ip_address AS \"ip_address: IpNetwork\",\n review_cache,\n last_viewed_at,\n r2_path,\n deleted_at,\n created_at,\n email,\n pr_title,\n status,\n github_installation_id,\n pr_owner,\n pr_repo,\n pr_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "gh_pr_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "claude_code_session_id", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "ip_address: IpNetwork", + "type_info": "Inet" + }, + { + "ordinal": 4, + "name": "review_cache", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "last_viewed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "r2_path", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "deleted_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "pr_title", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 13, + "name": "pr_owner", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "pr_repo", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "pr_number", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Text", + "Int8", + "Text", + "Text", + "Int4" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + true, + false, 
+ true, + false, + true, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204" +} diff --git a/crates/remote/.sqlx/query-4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json b/crates/remote/.sqlx/query-4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json new file mode 100644 index 00000000..001a2dcd --- /dev/null +++ b/crates/remote/.sqlx/query-4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT COUNT(*) as \"count!\"\n FROM reviews\n WHERE ip_address = $1\n AND created_at > $2\n AND deleted_at IS NULL\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Inet", + "Timestamptz" + ] + }, + "nullable": [ + null + ] + }, + "hash": "4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5" +} diff --git a/crates/remote/.sqlx/query-471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844.json b/crates/remote/.sqlx/query-471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844.json new file mode 100644 index 00000000..ad1e2e82 --- /dev/null +++ b/crates/remote/.sqlx/query-471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n FROM github_app_installations\n WHERE github_installation_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "organization_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": 
"github_account_login", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "github_account_type", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "repository_selection", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "installed_by_user_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "suspended_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844" +} diff --git a/crates/remote/.sqlx/query-55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe.json b/crates/remote/.sqlx/query-55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe.json new file mode 100644 index 00000000..71ecef02 --- /dev/null +++ b/crates/remote/.sqlx/query-55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_pending_installations\n WHERE expires_at < NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe" +} diff --git a/crates/remote/.sqlx/query-574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json b/crates/remote/.sqlx/query-574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json new file mode 100644 index 00000000..d92cd1c1 --- /dev/null +++ b/crates/remote/.sqlx/query-574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE github_app_installations\n SET suspended_at = NOW(), updated_at = NOW()\n WHERE github_installation_id = $1\n 
", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c" +} diff --git a/crates/remote/.sqlx/query-5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d.json b/crates/remote/.sqlx/query-5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d.json new file mode 100644 index 00000000..3d2b092f --- /dev/null +++ b/crates/remote/.sqlx/query-5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO github_app_repositories (installation_id, github_repo_id, repo_full_name)\n VALUES ($1, $2, $3)\n ON CONFLICT (installation_id, github_repo_id) DO UPDATE SET\n repo_full_name = EXCLUDED.repo_full_name\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d" +} diff --git a/crates/remote/.sqlx/query-6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119.json b/crates/remote/.sqlx/query-6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119.json new file mode 100644 index 00000000..bd846e90 --- /dev/null +++ b/crates/remote/.sqlx/query-6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_pending_installations\n WHERE organization_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119" +} diff --git a/crates/remote/.sqlx/query-79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791.json b/crates/remote/.sqlx/query-79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791.json new file mode 100644 index 00000000..45f675ff --- /dev/null 
+++ b/crates/remote/.sqlx/query-79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE reviews\n SET status = 'completed'\n WHERE id = $1 AND deleted_at IS NULL\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791" +} diff --git a/crates/remote/.sqlx/query-8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8.json b/crates/remote/.sqlx/query-8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8.json new file mode 100644 index 00000000..92326ab1 --- /dev/null +++ b/crates/remote/.sqlx/query-8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8.json @@ -0,0 +1,118 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO reviews (id, gh_pr_url, claude_code_session_id, ip_address, r2_path, email, pr_title)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n RETURNING\n id,\n gh_pr_url,\n claude_code_session_id,\n ip_address AS \"ip_address: IpNetwork\",\n review_cache,\n last_viewed_at,\n r2_path,\n deleted_at,\n created_at,\n email,\n pr_title,\n status,\n github_installation_id,\n pr_owner,\n pr_repo,\n pr_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "gh_pr_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "claude_code_session_id", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "ip_address: IpNetwork", + "type_info": "Inet" + }, + { + "ordinal": 4, + "name": "review_cache", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "last_viewed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "r2_path", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "deleted_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, 
+ "name": "email", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "pr_title", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 13, + "name": "pr_owner", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "pr_repo", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "pr_number", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Inet", + "Text", + "Text", + "Text" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + true, + false, + true, + false, + true, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8" +} diff --git a/crates/remote/.sqlx/query-9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4.json b/crates/remote/.sqlx/query-9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4.json new file mode 100644 index 00000000..eff2a4be --- /dev/null +++ b/crates/remote/.sqlx/query-9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_pending_installations\n WHERE state_token = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [] + }, + "hash": "9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4" +} diff --git a/crates/remote/.sqlx/query-9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b.json b/crates/remote/.sqlx/query-9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b.json new file mode 100644 index 00000000..e54ac3ac --- /dev/null +++ b/crates/remote/.sqlx/query-9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n installation_id,\n github_repo_id,\n 
repo_full_name,\n created_at\n FROM github_app_repositories\n WHERE installation_id = $1\n ORDER BY repo_full_name\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "installation_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "github_repo_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "repo_full_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + }, + "hash": "9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b" +} diff --git a/crates/remote/.sqlx/query-a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6.json b/crates/remote/.sqlx/query-a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6.json new file mode 100644 index 00000000..70ea79cf --- /dev/null +++ b/crates/remote/.sqlx/query-a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_repositories\n WHERE installation_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6" +} diff --git a/crates/remote/.sqlx/query-bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f.json b/crates/remote/.sqlx/query-bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f.json new file mode 100644 index 00000000..d1f09ec0 --- /dev/null +++ b/crates/remote/.sqlx/query-bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_installations\n WHERE organization_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": 
[] + }, + "hash": "bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f" +} diff --git a/crates/remote/.sqlx/query-c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f.json b/crates/remote/.sqlx/query-c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f.json new file mode 100644 index 00000000..f41fca48 --- /dev/null +++ b/crates/remote/.sqlx/query-c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE github_app_installations\n SET suspended_at = NULL, updated_at = NOW()\n WHERE github_installation_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f" +} diff --git a/crates/remote/.sqlx/query-da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b.json b/crates/remote/.sqlx/query-da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b.json new file mode 100644 index 00000000..f675e1b4 --- /dev/null +++ b/crates/remote/.sqlx/query-da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b.json @@ -0,0 +1,112 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n gh_pr_url,\n claude_code_session_id,\n ip_address AS \"ip_address: IpNetwork\",\n review_cache,\n last_viewed_at,\n r2_path,\n deleted_at,\n created_at,\n email,\n pr_title,\n status,\n github_installation_id,\n pr_owner,\n pr_repo,\n pr_number\n FROM reviews\n WHERE id = $1 AND deleted_at IS NULL\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "gh_pr_url", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "claude_code_session_id", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "ip_address: IpNetwork", + "type_info": "Inet" + }, + { + "ordinal": 4, + "name": "review_cache", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + 
"name": "last_viewed_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "r2_path", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "deleted_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "pr_title", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 13, + "name": "pr_owner", + "type_info": "Text" + }, + { + "ordinal": 14, + "name": "pr_repo", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "pr_number", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + true, + true, + true, + true, + false, + true, + false, + true, + false, + false, + true, + true, + true, + true + ] + }, + "hash": "da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b" +} diff --git a/crates/remote/.sqlx/query-df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a.json b/crates/remote/.sqlx/query-df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a.json new file mode 100644 index 00000000..250779e5 --- /dev/null +++ b/crates/remote/.sqlx/query-df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_installations\n WHERE github_installation_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a" +} diff --git a/crates/remote/.sqlx/query-e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c.json b/crates/remote/.sqlx/query-e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c.json new file mode 100644 index 
00000000..0302bea1 --- /dev/null +++ b/crates/remote/.sqlx/query-e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c.json @@ -0,0 +1,55 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO github_app_pending_installations (organization_id, user_id, state_token, expires_at)\n VALUES ($1, $2, $3, $4)\n RETURNING\n id,\n organization_id,\n user_id,\n state_token,\n expires_at,\n created_at\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "organization_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "user_id", + "type_info": "Uuid" + }, + { + "ordinal": 3, + "name": "state_token", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "expires_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Text", + "Timestamptz" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c" +} diff --git a/crates/remote/.sqlx/query-ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986.json b/crates/remote/.sqlx/query-ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986.json new file mode 100644 index 00000000..28ee7858 --- /dev/null +++ b/crates/remote/.sqlx/query-ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE github_app_installations\n SET repository_selection = $2, updated_at = NOW()\n WHERE github_installation_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [] + }, + "hash": "ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986" +} diff --git a/crates/remote/.sqlx/query-f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4.json 
b/crates/remote/.sqlx/query-f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4.json new file mode 100644 index 00000000..3acf57ff --- /dev/null +++ b/crates/remote/.sqlx/query-f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM github_app_repositories\n WHERE installation_id = $1 AND github_repo_id = ANY($2)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int8Array" + ] + }, + "nullable": [] + }, + "hash": "f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4" +} diff --git a/crates/remote/.sqlx/query-f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2.json b/crates/remote/.sqlx/query-f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2.json new file mode 100644 index 00000000..e25d06b4 --- /dev/null +++ b/crates/remote/.sqlx/query-f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2.json @@ -0,0 +1,76 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n FROM github_app_installations\n WHERE organization_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "organization_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "github_account_login", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "github_account_type", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "repository_selection", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "installed_by_user_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "suspended_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": 
"created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2" +} diff --git a/crates/remote/.sqlx/query-fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302.json b/crates/remote/.sqlx/query-fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302.json new file mode 100644 index 00000000..c1db185c --- /dev/null +++ b/crates/remote/.sqlx/query-fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302.json @@ -0,0 +1,81 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO github_app_installations (\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (github_installation_id) DO UPDATE SET\n organization_id = EXCLUDED.organization_id,\n github_account_login = EXCLUDED.github_account_login,\n github_account_type = EXCLUDED.github_account_type,\n repository_selection = EXCLUDED.repository_selection,\n installed_by_user_id = EXCLUDED.installed_by_user_id,\n suspended_at = NULL,\n updated_at = NOW()\n RETURNING\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "organization_id", + "type_info": "Uuid" + }, + { + "ordinal": 2, + "name": "github_installation_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "github_account_login", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "github_account_type", + "type_info": "Text" + 
}, + { + "ordinal": 5, + "name": "repository_selection", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "installed_by_user_id", + "type_info": "Uuid" + }, + { + "ordinal": 7, + "name": "suspended_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "updated_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Int8", + "Text", + "Text", + "Text", + "Uuid" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + true, + false, + false + ] + }, + "hash": "fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302" +} diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index b41e2f10..88e521c6 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -18,7 +18,8 @@ sentry = { version = "0.41.0", features = ["anyhow", "backtrace", "panic", "debu sentry-tracing = { version = "0.41.0", features = ["backtrace"] } serde = { workspace = true } serde_json = { workspace = true } -sqlx = { version = "0.8.6", default-features = false, features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json", "macros", "migrate"] } +sqlx = { version = "0.8.6", default-features = false, features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json", "macros", "migrate", "ipnetwork"] } +ipnetwork = "0.20" tokio = { workspace = true } tokio-stream = { version = "0.1.17", features = ["sync"] } tower-http = { workspace = true } @@ -32,5 +33,15 @@ uuid = { version = "1", features = ["serde", "v4"] } jsonwebtoken = "9" rand = "0.9" sha2 = "0.10" +hmac = "0.12" +subtle = "2.5" +hex = "0.4" +urlencoding = "2.1" url = "2.5" base64 = "0.22" +aws-config = { version = "1.5", default-features = false, features = ["behavior-version-latest", "rustls"] } +aws-sdk-s3 = { version = "1.65", default-features = false, features = ["behavior-version-latest", "rustls"] } +aws-credential-types = 
"1.2" +tempfile = "3" +tar = "0.4" +flate2 = "1.0" diff --git a/crates/remote/Dockerfile b/crates/remote/Dockerfile index 700f03c1..a35d9aed 100644 --- a/crates/remote/Dockerfile +++ b/crates/remote/Dockerfile @@ -46,7 +46,7 @@ FROM debian:bookworm-slim AS runtime ARG APP_NAME RUN apt-get update \ - && apt-get install -y --no-install-recommends ca-certificates libssl3 wget \ + && apt-get install -y --no-install-recommends ca-certificates libssl3 wget git \ && rm -rf /var/lib/apt/lists/* \ && useradd --system --create-home --uid 10001 appuser diff --git a/crates/remote/docker-compose.yml b/crates/remote/docker-compose.yml index b1738433..e5cf9561 100644 --- a/crates/remote/docker-compose.yml +++ b/crates/remote/docker-compose.yml @@ -1,7 +1,7 @@ services: remote-db: image: postgres:16-alpine - command: ["postgres", "-c", "wal_level=logical"] + command: [ "postgres", "-c", "wal_level=logical" ] environment: POSTGRES_DB: remote POSTGRES_USER: remote @@ -57,6 +57,15 @@ services: VITE_APP_BASE_URL: http://localhost:3000 VITE_API_BASE_URL: http://localhost:3000 ELECTRIC_ROLE_PASSWORD: ${ELECTRIC_ROLE_PASSWORD:?set in .env.remote} + R2_ACCESS_KEY_ID: ${R2_ACCESS_KEY_ID:-} + R2_SECRET_ACCESS_KEY: ${R2_SECRET_ACCESS_KEY:-} + R2_REVIEW_ENDPOINT: ${R2_REVIEW_ENDPOINT:-} + R2_REVIEW_BUCKET: ${R2_REVIEW_BUCKET:-} + REVIEW_WORKER_BASE_URL: ${REVIEW_WORKER_BASE_URL:-} + GITHUB_APP_ID: ${GITHUB_APP_ID:-} + GITHUB_APP_PRIVATE_KEY: ${GITHUB_APP_PRIVATE_KEY:-} + GITHUB_APP_WEBHOOK_SECRET: ${GITHUB_APP_WEBHOOK_SECRET:-} + GITHUB_APP_SLUG: ${GITHUB_APP_SLUG:-} ports: - "127.0.0.1:3000:8081" restart: unless-stopped diff --git a/crates/remote/migrations/20251212000000_create_reviews_table.sql b/crates/remote/migrations/20251212000000_create_reviews_table.sql new file mode 100644 index 00000000..b8a05c5f --- /dev/null +++ b/crates/remote/migrations/20251212000000_create_reviews_table.sql @@ -0,0 +1,17 @@ +CREATE TABLE IF NOT EXISTS reviews ( + id UUID PRIMARY KEY DEFAULT 
gen_random_uuid(), + gh_pr_url TEXT NOT NULL, + claude_code_session_id TEXT, + ip_address INET NOT NULL, + review_cache JSONB, + last_viewed_at TIMESTAMPTZ, + r2_path TEXT NOT NULL, + deleted_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + email TEXT NOT NULL, + pr_title TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'pending' +); + +-- Index for rate limiting queries (IP + time range) +CREATE INDEX IF NOT EXISTS idx_reviews_ip_created ON reviews (ip_address, created_at); diff --git a/crates/remote/migrations/20251215000000_github_app_installations.sql b/crates/remote/migrations/20251215000000_github_app_installations.sql new file mode 100644 index 00000000..40e3933b --- /dev/null +++ b/crates/remote/migrations/20251215000000_github_app_installations.sql @@ -0,0 +1,40 @@ +-- GitHub App installations linked to organizations +CREATE TABLE github_app_installations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + organization_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE, + github_installation_id BIGINT NOT NULL UNIQUE, + github_account_login TEXT NOT NULL, + github_account_type TEXT NOT NULL, -- 'Organization' or 'User' + repository_selection TEXT NOT NULL, -- 'all' or 'selected' + installed_by_user_id UUID REFERENCES users(id) ON DELETE SET NULL, + suspended_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_github_app_installations_org ON github_app_installations(organization_id); + +-- Repositories accessible via an installation +CREATE TABLE github_app_repositories ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + installation_id UUID NOT NULL REFERENCES github_app_installations(id) ON DELETE CASCADE, + github_repo_id BIGINT NOT NULL, + repo_full_name TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(installation_id, github_repo_id) +); + +CREATE INDEX idx_github_app_repos_installation ON 
github_app_repositories(installation_id); + +-- Track pending installations (before callback completes) +CREATE TABLE github_app_pending_installations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + organization_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + state_token TEXT NOT NULL UNIQUE, + expires_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_pending_installations_state ON github_app_pending_installations(state_token); +CREATE INDEX idx_pending_installations_expires ON github_app_pending_installations(expires_at); diff --git a/crates/remote/migrations/20251216000000_add_webhook_fields_to_reviews.sql b/crates/remote/migrations/20251216000000_add_webhook_fields_to_reviews.sql new file mode 100644 index 00000000..7414e513 --- /dev/null +++ b/crates/remote/migrations/20251216000000_add_webhook_fields_to_reviews.sql @@ -0,0 +1,15 @@ +-- Make email and ip_address nullable for webhook-triggered reviews +ALTER TABLE reviews +ALTER COLUMN email DROP NOT NULL, +ALTER COLUMN ip_address DROP NOT NULL; + +-- Add webhook-specific columns +ALTER TABLE reviews +ADD COLUMN github_installation_id BIGINT, +ADD COLUMN pr_owner TEXT, +ADD COLUMN pr_repo TEXT, +ADD COLUMN pr_number INTEGER; + +-- Index for webhook reviews +CREATE INDEX idx_reviews_webhook ON reviews (github_installation_id) +WHERE github_installation_id IS NOT NULL; diff --git a/crates/remote/src/app.rs b/crates/remote/src/app.rs index 4cdeedc5..d73ff679 100644 --- a/crates/remote/src/app.rs +++ b/crates/remote/src/app.rs @@ -12,7 +12,9 @@ use crate::{ }, config::RemoteServerConfig, db, + github_app::GitHubAppService, mail::LoopsMailer, + r2::R2Service, routes, }; @@ -84,7 +86,44 @@ impl Server { ) })?; - let http_client = reqwest::Client::new(); + let r2 = config.r2.as_ref().map(R2Service::new); + if r2.is_some() { + tracing::info!("R2 storage service initialized"); + } else { 
+ tracing::warn!( + "R2 storage service not configured. Set R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY, R2_REVIEW_ENDPOINT, and R2_REVIEW_BUCKET to enable." + ); + } + + let http_client = reqwest::Client::builder() + .user_agent("VibeKanbanRemote/1.0") + .build() + .context("failed to create HTTP client")?; + + let github_app = match &config.github_app { + Some(github_config) => { + match GitHubAppService::new(github_config, http_client.clone()) { + Ok(service) => { + tracing::info!( + app_slug = %github_config.app_slug, + "GitHub App service initialized" + ); + Some(Arc::new(service)) + } + Err(e) => { + tracing::error!(?e, "Failed to initialize GitHub App service"); + None + } + } + } + None => { + tracing::info!( + "GitHub App not configured. Set GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY, GITHUB_APP_WEBHOOK_SECRET, and GITHUB_APP_SLUG to enable." + ); + None + } + }; + let state = AppState::new( pool.clone(), config.clone(), @@ -94,6 +133,8 @@ impl Server { mailer, server_public_base_url, http_client, + r2, + github_app, ); let router = routes::router(state); diff --git a/crates/remote/src/config.rs b/crates/remote/src/config.rs index 528eec96..22ae3b32 100644 --- a/crates/remote/src/config.rs +++ b/crates/remote/src/config.rs @@ -13,6 +13,105 @@ pub struct RemoteServerConfig { pub electric_url: String, pub electric_secret: Option, pub electric_role_password: Option, + pub r2: Option, + pub review_worker_base_url: Option, + pub github_app: Option, +} + +#[derive(Debug, Clone)] +pub struct R2Config { + pub access_key_id: String, + pub secret_access_key: SecretString, + pub endpoint: String, + pub bucket: String, + pub presign_expiry_secs: u64, +} + +impl R2Config { + pub fn from_env() -> Result, ConfigError> { + let access_key_id = match env::var("R2_ACCESS_KEY_ID") { + Ok(v) => v, + Err(_) => { + tracing::info!("R2_ACCESS_KEY_ID not set, R2 storage disabled"); + return Ok(None); + } + }; + + tracing::info!("R2_ACCESS_KEY_ID is set, checking other R2 env vars"); + + let 
secret_access_key = env::var("R2_SECRET_ACCESS_KEY") + .map_err(|_| ConfigError::MissingVar("R2_SECRET_ACCESS_KEY"))?; + + let endpoint = env::var("R2_REVIEW_ENDPOINT") + .map_err(|_| ConfigError::MissingVar("R2_REVIEW_ENDPOINT"))?; + + let bucket = env::var("R2_REVIEW_BUCKET") + .map_err(|_| ConfigError::MissingVar("R2_REVIEW_BUCKET"))?; + + let presign_expiry_secs = env::var("R2_PRESIGN_EXPIRY_SECS") + .ok() + .and_then(|v| v.parse().ok()) + .unwrap_or(3600); + + tracing::info!(endpoint = %endpoint, bucket = %bucket, "R2 config loaded successfully"); + + Ok(Some(Self { + access_key_id, + secret_access_key: SecretString::new(secret_access_key.into()), + endpoint, + bucket, + presign_expiry_secs, + })) + } +} + +#[derive(Debug, Clone)] +pub struct GitHubAppConfig { + pub app_id: u64, + pub private_key: SecretString, // Base64-encoded PEM + pub webhook_secret: SecretString, + pub app_slug: String, +} + +impl GitHubAppConfig { + pub fn from_env() -> Result, ConfigError> { + let app_id = match env::var("GITHUB_APP_ID") { + Ok(v) => v, + Err(_) => { + tracing::info!("GITHUB_APP_ID not set, GitHub App integration disabled"); + return Ok(None); + } + }; + + let app_id: u64 = app_id + .parse() + .map_err(|_| ConfigError::InvalidVar("GITHUB_APP_ID"))?; + + tracing::info!("GITHUB_APP_ID is set, checking other GitHub App env vars"); + + let private_key = env::var("GITHUB_APP_PRIVATE_KEY") + .map_err(|_| ConfigError::MissingVar("GITHUB_APP_PRIVATE_KEY"))?; + + // Validate that the private key is valid base64 + BASE64_STANDARD + .decode(private_key.as_bytes()) + .map_err(|_| ConfigError::InvalidVar("GITHUB_APP_PRIVATE_KEY"))?; + + let webhook_secret = env::var("GITHUB_APP_WEBHOOK_SECRET") + .map_err(|_| ConfigError::MissingVar("GITHUB_APP_WEBHOOK_SECRET"))?; + + let app_slug = + env::var("GITHUB_APP_SLUG").map_err(|_| ConfigError::MissingVar("GITHUB_APP_SLUG"))?; + + tracing::info!(app_id = %app_id, app_slug = %app_slug, "GitHub App config loaded successfully"); + + 
Ok(Some(Self { + app_id, + private_key: SecretString::new(private_key.into()), + webhook_secret: SecretString::new(webhook_secret.into()), + app_slug, + })) + } } #[derive(Debug, Error)] @@ -49,6 +148,12 @@ impl RemoteServerConfig { .ok() .map(|s| SecretString::new(s.into())); + let r2 = R2Config::from_env()?; + + let review_worker_base_url = env::var("REVIEW_WORKER_BASE_URL").ok(); + + let github_app = GitHubAppConfig::from_env()?; + Ok(Self { database_url, listen_addr, @@ -57,6 +162,9 @@ impl RemoteServerConfig { electric_url, electric_secret, electric_role_password, + r2, + review_worker_base_url, + github_app, }) } } diff --git a/crates/remote/src/db/github_app.rs b/crates/remote/src/db/github_app.rs new file mode 100644 index 00000000..fa5be4d0 --- /dev/null +++ b/crates/remote/src/db/github_app.rs @@ -0,0 +1,490 @@ +use chrono::{DateTime, Utc}; +use sqlx::{FromRow, PgPool}; +use thiserror::Error; +use uuid::Uuid; + +#[derive(Debug, Error)] +pub enum GitHubAppDbError { + #[error("database error: {0}")] + Database(#[from] sqlx::Error), + #[error("installation not found")] + NotFound, + #[error("pending installation not found or expired")] + PendingNotFound, +} + +/// A GitHub App installation linked to an organization +#[derive(Debug, Clone, FromRow)] +pub struct GitHubAppInstallation { + pub id: Uuid, + pub organization_id: Uuid, + pub github_installation_id: i64, + pub github_account_login: String, + pub github_account_type: String, + pub repository_selection: String, + pub installed_by_user_id: Option, + pub suspended_at: Option>, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +/// A repository accessible via an installation +#[derive(Debug, Clone, FromRow)] +pub struct GitHubAppRepository { + pub id: Uuid, + pub installation_id: Uuid, + pub github_repo_id: i64, + pub repo_full_name: String, + pub created_at: DateTime, +} + +/// A pending installation waiting for callback +#[derive(Debug, Clone, FromRow)] +pub struct PendingInstallation { + pub 
id: Uuid, + pub organization_id: Uuid, + pub user_id: Uuid, + pub state_token: String, + pub expires_at: DateTime, + pub created_at: DateTime, +} + +pub struct GitHubAppRepository2<'a> { + pool: &'a PgPool, +} + +impl<'a> GitHubAppRepository2<'a> { + pub fn new(pool: &'a PgPool) -> Self { + Self { pool } + } + + // ========== Installations ========== + + pub async fn create_installation( + &self, + organization_id: Uuid, + github_installation_id: i64, + github_account_login: &str, + github_account_type: &str, + repository_selection: &str, + installed_by_user_id: Uuid, + ) -> Result { + let installation = sqlx::query_as!( + GitHubAppInstallation, + r#" + INSERT INTO github_app_installations ( + organization_id, + github_installation_id, + github_account_login, + github_account_type, + repository_selection, + installed_by_user_id + ) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT (github_installation_id) DO UPDATE SET + organization_id = EXCLUDED.organization_id, + github_account_login = EXCLUDED.github_account_login, + github_account_type = EXCLUDED.github_account_type, + repository_selection = EXCLUDED.repository_selection, + installed_by_user_id = EXCLUDED.installed_by_user_id, + suspended_at = NULL, + updated_at = NOW() + RETURNING + id, + organization_id, + github_installation_id, + github_account_login, + github_account_type, + repository_selection, + installed_by_user_id, + suspended_at, + created_at, + updated_at + "#, + organization_id, + github_installation_id, + github_account_login, + github_account_type, + repository_selection, + installed_by_user_id + ) + .fetch_one(self.pool) + .await?; + + Ok(installation) + } + + pub async fn get_by_github_id( + &self, + github_installation_id: i64, + ) -> Result, GitHubAppDbError> { + let installation = sqlx::query_as!( + GitHubAppInstallation, + r#" + SELECT + id, + organization_id, + github_installation_id, + github_account_login, + github_account_type, + repository_selection, + installed_by_user_id, + 
suspended_at, + created_at, + updated_at + FROM github_app_installations + WHERE github_installation_id = $1 + "#, + github_installation_id + ) + .fetch_optional(self.pool) + .await?; + + Ok(installation) + } + + /// Find an installation by the GitHub account login (owner name) + pub async fn get_by_account_login( + &self, + account_login: &str, + ) -> Result, GitHubAppDbError> { + let installation = sqlx::query_as!( + GitHubAppInstallation, + r#" + SELECT + id, + organization_id, + github_installation_id, + github_account_login, + github_account_type, + repository_selection, + installed_by_user_id, + suspended_at, + created_at, + updated_at + FROM github_app_installations + WHERE github_account_login = $1 + "#, + account_login + ) + .fetch_optional(self.pool) + .await?; + + Ok(installation) + } + + pub async fn get_by_organization( + &self, + organization_id: Uuid, + ) -> Result, GitHubAppDbError> { + let installation = sqlx::query_as!( + GitHubAppInstallation, + r#" + SELECT + id, + organization_id, + github_installation_id, + github_account_login, + github_account_type, + repository_selection, + installed_by_user_id, + suspended_at, + created_at, + updated_at + FROM github_app_installations + WHERE organization_id = $1 + "#, + organization_id + ) + .fetch_optional(self.pool) + .await?; + + Ok(installation) + } + + pub async fn delete_by_github_id( + &self, + github_installation_id: i64, + ) -> Result<(), GitHubAppDbError> { + sqlx::query!( + r#" + DELETE FROM github_app_installations + WHERE github_installation_id = $1 + "#, + github_installation_id + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + pub async fn delete_by_organization( + &self, + organization_id: Uuid, + ) -> Result<(), GitHubAppDbError> { + sqlx::query!( + r#" + DELETE FROM github_app_installations + WHERE organization_id = $1 + "#, + organization_id + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + pub async fn suspend(&self, github_installation_id: i64) -> Result<(), 
GitHubAppDbError> { + sqlx::query!( + r#" + UPDATE github_app_installations + SET suspended_at = NOW(), updated_at = NOW() + WHERE github_installation_id = $1 + "#, + github_installation_id + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + pub async fn unsuspend(&self, github_installation_id: i64) -> Result<(), GitHubAppDbError> { + sqlx::query!( + r#" + UPDATE github_app_installations + SET suspended_at = NULL, updated_at = NOW() + WHERE github_installation_id = $1 + "#, + github_installation_id + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + pub async fn update_repository_selection( + &self, + github_installation_id: i64, + repository_selection: &str, + ) -> Result<(), GitHubAppDbError> { + sqlx::query!( + r#" + UPDATE github_app_installations + SET repository_selection = $2, updated_at = NOW() + WHERE github_installation_id = $1 + "#, + github_installation_id, + repository_selection + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + // ========== Repositories ========== + + pub async fn sync_repositories( + &self, + installation_id: Uuid, + repos: &[(i64, String)], // (github_repo_id, repo_full_name) + ) -> Result<(), GitHubAppDbError> { + // Delete all existing repos for this installation + sqlx::query!( + r#" + DELETE FROM github_app_repositories + WHERE installation_id = $1 + "#, + installation_id + ) + .execute(self.pool) + .await?; + + // Insert new repos + for (github_repo_id, repo_full_name) in repos { + sqlx::query!( + r#" + INSERT INTO github_app_repositories (installation_id, github_repo_id, repo_full_name) + VALUES ($1, $2, $3) + ON CONFLICT (installation_id, github_repo_id) DO UPDATE SET + repo_full_name = EXCLUDED.repo_full_name + "#, + installation_id, + github_repo_id, + repo_full_name + ) + .execute(self.pool) + .await?; + } + + Ok(()) + } + + pub async fn get_repositories( + &self, + installation_id: Uuid, + ) -> Result, GitHubAppDbError> { + let repos = sqlx::query_as!( + GitHubAppRepository, + r#" + SELECT + id, + 
installation_id, + github_repo_id, + repo_full_name, + created_at + FROM github_app_repositories + WHERE installation_id = $1 + ORDER BY repo_full_name + "#, + installation_id + ) + .fetch_all(self.pool) + .await?; + + Ok(repos) + } + + pub async fn add_repositories( + &self, + installation_id: Uuid, + repos: &[(i64, String)], + ) -> Result<(), GitHubAppDbError> { + for (github_repo_id, repo_full_name) in repos { + sqlx::query!( + r#" + INSERT INTO github_app_repositories (installation_id, github_repo_id, repo_full_name) + VALUES ($1, $2, $3) + ON CONFLICT (installation_id, github_repo_id) DO UPDATE SET + repo_full_name = EXCLUDED.repo_full_name + "#, + installation_id, + github_repo_id, + repo_full_name + ) + .execute(self.pool) + .await?; + } + + Ok(()) + } + + pub async fn remove_repositories( + &self, + installation_id: Uuid, + github_repo_ids: &[i64], + ) -> Result<(), GitHubAppDbError> { + sqlx::query!( + r#" + DELETE FROM github_app_repositories + WHERE installation_id = $1 AND github_repo_id = ANY($2) + "#, + installation_id, + github_repo_ids + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + // ========== Pending Installations ========== + + pub async fn create_pending( + &self, + organization_id: Uuid, + user_id: Uuid, + state_token: &str, + expires_at: DateTime, + ) -> Result { + // Delete any existing pending installation for this org + sqlx::query!( + r#" + DELETE FROM github_app_pending_installations + WHERE organization_id = $1 + "#, + organization_id + ) + .execute(self.pool) + .await?; + + let pending = sqlx::query_as!( + PendingInstallation, + r#" + INSERT INTO github_app_pending_installations (organization_id, user_id, state_token, expires_at) + VALUES ($1, $2, $3, $4) + RETURNING + id, + organization_id, + user_id, + state_token, + expires_at, + created_at + "#, + organization_id, + user_id, + state_token, + expires_at + ) + .fetch_one(self.pool) + .await?; + + Ok(pending) + } + + pub async fn get_pending_by_state( + &self, + state_token: 
&str, + ) -> Result, GitHubAppDbError> { + let pending = sqlx::query_as!( + PendingInstallation, + r#" + SELECT + id, + organization_id, + user_id, + state_token, + expires_at, + created_at + FROM github_app_pending_installations + WHERE state_token = $1 AND expires_at > NOW() + "#, + state_token + ) + .fetch_optional(self.pool) + .await?; + + Ok(pending) + } + + pub async fn delete_pending(&self, state_token: &str) -> Result<(), GitHubAppDbError> { + sqlx::query!( + r#" + DELETE FROM github_app_pending_installations + WHERE state_token = $1 + "#, + state_token + ) + .execute(self.pool) + .await?; + + Ok(()) + } + + pub async fn cleanup_expired_pending(&self) -> Result { + let result = sqlx::query!( + r#" + DELETE FROM github_app_pending_installations + WHERE expires_at < NOW() + "# + ) + .execute(self.pool) + .await?; + + Ok(result.rows_affected()) + } +} diff --git a/crates/remote/src/db/mod.rs b/crates/remote/src/db/mod.rs index 64afd66d..ee5895f2 100644 --- a/crates/remote/src/db/mod.rs +++ b/crates/remote/src/db/mod.rs @@ -1,4 +1,5 @@ pub mod auth; +pub mod github_app; pub mod identity_errors; pub mod invitations; pub mod oauth; @@ -6,6 +7,7 @@ pub mod oauth_accounts; pub mod organization_members; pub mod organizations; pub mod projects; +pub mod reviews; pub mod tasks; pub mod users; diff --git a/crates/remote/src/db/reviews.rs b/crates/remote/src/db/reviews.rs new file mode 100644 index 00000000..e105fd5d --- /dev/null +++ b/crates/remote/src/db/reviews.rs @@ -0,0 +1,252 @@ +use std::net::IpAddr; + +use chrono::{DateTime, Utc}; +use ipnetwork::IpNetwork; +use serde::Serialize; +use sqlx::{PgPool, query_as}; +use thiserror::Error; +use uuid::Uuid; + +#[derive(Debug, Error)] +pub enum ReviewError { + #[error("review not found")] + NotFound, + #[error(transparent)] + Database(#[from] sqlx::Error), +} + +#[derive(Debug, Clone, sqlx::FromRow, Serialize)] +pub struct Review { + pub id: Uuid, + pub gh_pr_url: String, + pub claude_code_session_id: Option, + pub 
ip_address: Option, + pub review_cache: Option, + pub last_viewed_at: Option>, + pub r2_path: String, + pub deleted_at: Option>, + pub created_at: DateTime, + pub email: Option, + pub pr_title: String, + pub status: String, + // Webhook-specific fields + pub github_installation_id: Option, + pub pr_owner: Option, + pub pr_repo: Option, + pub pr_number: Option, +} + +impl Review { + /// Returns true if this review was triggered by a GitHub webhook + pub fn is_webhook_review(&self) -> bool { + self.github_installation_id.is_some() + } +} + +/// Parameters for creating a new review (CLI-triggered) +pub struct CreateReviewParams<'a> { + pub id: Uuid, + pub gh_pr_url: &'a str, + pub claude_code_session_id: Option<&'a str>, + pub ip_address: IpAddr, + pub r2_path: &'a str, + pub email: &'a str, + pub pr_title: &'a str, +} + +/// Parameters for creating a webhook-triggered review +pub struct CreateWebhookReviewParams<'a> { + pub id: Uuid, + pub gh_pr_url: &'a str, + pub r2_path: &'a str, + pub pr_title: &'a str, + pub github_installation_id: i64, + pub pr_owner: &'a str, + pub pr_repo: &'a str, + pub pr_number: i32, +} + +pub struct ReviewRepository<'a> { + pool: &'a PgPool, +} + +impl<'a> ReviewRepository<'a> { + pub fn new(pool: &'a PgPool) -> Self { + Self { pool } + } + + pub async fn create(&self, params: CreateReviewParams<'_>) -> Result { + let ip_network = IpNetwork::from(params.ip_address); + + query_as!( + Review, + r#" + INSERT INTO reviews (id, gh_pr_url, claude_code_session_id, ip_address, r2_path, email, pr_title) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING + id, + gh_pr_url, + claude_code_session_id, + ip_address AS "ip_address: IpNetwork", + review_cache, + last_viewed_at, + r2_path, + deleted_at, + created_at, + email, + pr_title, + status, + github_installation_id, + pr_owner, + pr_repo, + pr_number + "#, + params.id, + params.gh_pr_url, + params.claude_code_session_id, + ip_network, + params.r2_path, + params.email, + params.pr_title + ) + 
.fetch_one(self.pool) + .await + .map_err(ReviewError::from) + } + + /// Create a webhook-triggered review (no email/IP) + pub async fn create_webhook_review( + &self, + params: CreateWebhookReviewParams<'_>, + ) -> Result { + query_as!( + Review, + r#" + INSERT INTO reviews (id, gh_pr_url, r2_path, pr_title, github_installation_id, pr_owner, pr_repo, pr_number) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + RETURNING + id, + gh_pr_url, + claude_code_session_id, + ip_address AS "ip_address: IpNetwork", + review_cache, + last_viewed_at, + r2_path, + deleted_at, + created_at, + email, + pr_title, + status, + github_installation_id, + pr_owner, + pr_repo, + pr_number + "#, + params.id, + params.gh_pr_url, + params.r2_path, + params.pr_title, + params.github_installation_id, + params.pr_owner, + params.pr_repo, + params.pr_number + ) + .fetch_one(self.pool) + .await + .map_err(ReviewError::from) + } + + /// Get a review by its ID. + /// Returns NotFound if the review doesn't exist or has been deleted. + pub async fn get_by_id(&self, id: Uuid) -> Result { + query_as!( + Review, + r#" + SELECT + id, + gh_pr_url, + claude_code_session_id, + ip_address AS "ip_address: IpNetwork", + review_cache, + last_viewed_at, + r2_path, + deleted_at, + created_at, + email, + pr_title, + status, + github_installation_id, + pr_owner, + pr_repo, + pr_number + FROM reviews + WHERE id = $1 AND deleted_at IS NULL + "#, + id + ) + .fetch_optional(self.pool) + .await? + .ok_or(ReviewError::NotFound) + } + + /// Count reviews from an IP address since a given timestamp. + /// Used for rate limiting. + pub async fn count_since( + &self, + ip_address: IpAddr, + since: DateTime, + ) -> Result { + let ip_network = IpNetwork::from(ip_address); + + let result = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" 
+ FROM reviews + WHERE ip_address = $1 + AND created_at > $2 + AND deleted_at IS NULL + "#, + ip_network, + since + ) + .fetch_one(self.pool) + .await + .map_err(ReviewError::from)?; + + Ok(result.count) + } + + /// Mark a review as completed + pub async fn mark_completed(&self, id: Uuid) -> Result<(), ReviewError> { + sqlx::query!( + r#" + UPDATE reviews + SET status = 'completed' + WHERE id = $1 AND deleted_at IS NULL + "#, + id + ) + .execute(self.pool) + .await + .map_err(ReviewError::from)?; + + Ok(()) + } + + /// Mark a review as failed + pub async fn mark_failed(&self, id: Uuid) -> Result<(), ReviewError> { + sqlx::query!( + r#" + UPDATE reviews + SET status = 'failed' + WHERE id = $1 AND deleted_at IS NULL + "#, + id + ) + .execute(self.pool) + .await + .map_err(ReviewError::from)?; + + Ok(()) + } +} diff --git a/crates/remote/src/github_app/jwt.rs b/crates/remote/src/github_app/jwt.rs new file mode 100644 index 00000000..ae0cbc76 --- /dev/null +++ b/crates/remote/src/github_app/jwt.rs @@ -0,0 +1,97 @@ +use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD}; +use jsonwebtoken::{Algorithm, EncodingKey, Header, encode}; +use secrecy::{ExposeSecret, SecretString}; +use serde::Serialize; +use thiserror::Error; + +/// JWT generator for GitHub App authentication. +/// GitHub Apps authenticate using RS256-signed JWTs with a 10-minute max TTL. 
+#[derive(Clone)] +pub struct GitHubAppJwt { + app_id: u64, + private_key_pem: SecretString, +} + +#[derive(Debug, Error)] +pub enum JwtError { + #[error("invalid private key: {0}")] + InvalidPrivateKey(String), + #[error("failed to encode JWT: {0}")] + EncodingError(#[from] jsonwebtoken::errors::Error), + #[error("invalid base64 encoding")] + Base64Error, +} + +#[derive(Debug, Serialize)] +struct GitHubAppClaims { + /// Issuer - the GitHub App ID + iss: String, + /// Issued at (Unix timestamp) + iat: i64, + /// Expiration (Unix timestamp) - max 10 minutes from iat + exp: i64, +} + +impl GitHubAppJwt { + /// Create a new JWT generator from base64-encoded PEM private key + pub fn new(app_id: u64, private_key_base64: SecretString) -> Result { + // Decode base64 to get raw PEM + let pem_bytes = BASE64_STANDARD + .decode(private_key_base64.expose_secret().as_bytes()) + .map_err(|_| JwtError::Base64Error)?; + + let pem_string = String::from_utf8(pem_bytes) + .map_err(|_| JwtError::InvalidPrivateKey("PEM is not valid UTF-8".to_string()))?; + + // Validate we can parse this as an RSA key + EncodingKey::from_rsa_pem(pem_string.as_bytes()) + .map_err(|e| JwtError::InvalidPrivateKey(e.to_string()))?; + + Ok(Self { + app_id, + private_key_pem: SecretString::new(pem_string.into()), + }) + } + + /// Generate a JWT for authenticating as the GitHub App. + /// This JWT is used to get installation access tokens. + /// Max TTL is 10 minutes as per GitHub's requirements. 
+ pub fn generate(&self) -> Result { + let now = chrono::Utc::now().timestamp(); + // Subtract 60 seconds from iat to account for clock drift + let iat = now - 60; + // GitHub allows max 10 minutes, we use 9 to be safe + let exp = now + (9 * 60); + + let claims = GitHubAppClaims { + iss: self.app_id.to_string(), + iat, + exp, + }; + + let header = Header::new(Algorithm::RS256); + let key = EncodingKey::from_rsa_pem(self.private_key_pem.expose_secret().as_bytes())?; + + encode(&header, &claims, &key).map_err(JwtError::EncodingError) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + // Test with a dummy key - in real tests you'd use a proper test key + #[test] + fn test_invalid_base64_fails() { + let result = GitHubAppJwt::new(12345, SecretString::new("not-valid-base64!!!".into())); + assert!(matches!(result, Err(JwtError::Base64Error))); + } + + #[test] + fn test_invalid_pem_fails() { + // Valid base64, but not a valid PEM + let invalid_pem_b64 = BASE64_STANDARD.encode("not a real pem key"); + let result = GitHubAppJwt::new(12345, SecretString::new(invalid_pem_b64.into())); + assert!(matches!(result, Err(JwtError::InvalidPrivateKey(_)))); + } +} diff --git a/crates/remote/src/github_app/mod.rs b/crates/remote/src/github_app/mod.rs new file mode 100644 index 00000000..e203d699 --- /dev/null +++ b/crates/remote/src/github_app/mod.rs @@ -0,0 +1,9 @@ +mod jwt; +mod pr_review; +mod service; +mod webhook; + +pub use jwt::GitHubAppJwt; +pub use pr_review::{PrReviewError, PrReviewParams, PrReviewService}; +pub use service::{GitHubAppService, InstallationInfo, PrDetails, PrRef, Repository}; +pub use webhook::verify_webhook_signature; diff --git a/crates/remote/src/github_app/pr_review.rs b/crates/remote/src/github_app/pr_review.rs new file mode 100644 index 00000000..502e39e7 --- /dev/null +++ b/crates/remote/src/github_app/pr_review.rs @@ -0,0 +1,243 @@ +//! PR Review service for webhook-triggered code reviews. 
+ +use std::{fs::File, path::Path}; + +use flate2::{Compression, write::GzEncoder}; +use reqwest::Client; +use sqlx::PgPool; +use tar::Builder; +use thiserror::Error; +use tracing::{debug, error, info}; +use uuid::Uuid; + +use super::service::{GitHubAppError, GitHubAppService}; +use crate::{ + db::reviews::{CreateWebhookReviewParams, ReviewError, ReviewRepository}, + r2::{R2Error, R2Service}, +}; + +/// Parameters extracted from webhook payload for PR review +#[derive(Debug, Clone)] +pub struct PrReviewParams { + pub installation_id: i64, + pub owner: String, + pub repo: String, + pub pr_number: u64, + pub pr_title: String, + pub pr_body: String, + pub head_sha: String, + pub base_sha: String, +} + +#[derive(Debug, Error)] +pub enum PrReviewError { + #[error("GitHub error: {0}")] + GitHub(#[from] GitHubAppError), + #[error("R2 error: {0}")] + R2(#[from] R2Error), + #[error("Database error: {0}")] + Database(#[from] ReviewError), + #[error("Archive error: {0}")] + Archive(String), + #[error("Worker error: {0}")] + Worker(String), +} + +/// Service for processing webhook-triggered PR reviews +pub struct PrReviewService { + github_app: GitHubAppService, + r2: R2Service, + http_client: Client, + worker_base_url: String, + server_base_url: String, +} + +impl PrReviewService { + pub fn new( + github_app: GitHubAppService, + r2: R2Service, + http_client: Client, + worker_base_url: String, + server_base_url: String, + ) -> Self { + Self { + github_app, + r2, + http_client, + worker_base_url, + server_base_url, + } + } + + /// Process a PR review from webhook. + /// + /// This will: + /// 1. Clone the repository at the PR head commit + /// 2. Create a tarball of the repository + /// 3. Upload the tarball to R2 + /// 4. Create a review record in the database + /// 5. Start the review worker + /// + /// Returns the review ID on success. 
+    pub async fn process_pr_review(
+        &self,
+        pool: &PgPool,
+        params: PrReviewParams,
+    ) -> Result<Uuid, PrReviewError> {
+        let review_id = Uuid::new_v4();
+
+        info!(
+            review_id = %review_id,
+            owner = %params.owner,
+            repo = %params.repo,
+            pr_number = params.pr_number,
+            "Starting webhook PR review"
+        );
+
+        // 1. Clone the repository
+        let temp_dir = self
+            .github_app
+            .clone_repo(
+                params.installation_id,
+                &params.owner,
+                &params.repo,
+                &params.head_sha,
+            )
+            .await?;
+
+        debug!(review_id = %review_id, "Repository cloned");
+
+        // 2. Create tarball
+        let tarball =
+            create_tarball(temp_dir.path()).map_err(|e| PrReviewError::Archive(e.to_string()))?;
+
+        let tarball_size_mb = tarball.len() as f64 / 1_048_576.0;
+        debug!(review_id = %review_id, size_mb = tarball_size_mb, "Tarball created");
+
+        // 3. Upload to R2
+        let r2_path = self.r2.upload_bytes(review_id, tarball).await?;
+        debug!(review_id = %review_id, r2_path = %r2_path, "Uploaded to R2");
+
+        // 4. Create review record in database
+        let gh_pr_url = format!(
+            "https://github.com/{}/{}/pull/{}",
+            params.owner, params.repo, params.pr_number
+        );
+
+        let repo = ReviewRepository::new(pool);
+        repo.create_webhook_review(CreateWebhookReviewParams {
+            id: review_id,
+            gh_pr_url: &gh_pr_url,
+            r2_path: &r2_path,
+            pr_title: &params.pr_title,
+            github_installation_id: params.installation_id,
+            pr_owner: &params.owner,
+            pr_repo: &params.repo,
+            pr_number: params.pr_number as i32,
+        })
+        .await?;
+
+        debug!(review_id = %review_id, "Review record created");
+
+        // 5. Start the review worker
+        let codebase_url = format!(
+            "{}/reviews/{}/payload.tar.gz",
+            self.r2_public_url(),
+            review_id
+        );
+        let callback_url = format!("{}/review/{}", self.server_base_url, review_id);
+
+        let start_request = serde_json::json!({
+            "id": review_id.to_string(),
+            "title": params.pr_title,
+            "description": params.pr_body,
+            "org": params.owner,
+            "repo": params.repo,
+            "codebaseUrl": codebase_url,
+            "baseCommit": params.base_sha,
+            "callbackUrl": callback_url,
+        });
+
+        let response = self
+            .http_client
+            .post(format!("{}/review/start", self.worker_base_url))
+            .json(&start_request)
+            .send()
+            .await
+            .map_err(|e| PrReviewError::Worker(format!("Failed to call worker: {e}")))?;
+
+        if !response.status().is_success() {
+            let status = response.status();
+            let body = response.text().await.unwrap_or_default();
+            error!(review_id = %review_id, status = %status, body = %body, "Worker returned error");
+            return Err(PrReviewError::Worker(format!(
+                "Worker returned {}: {}",
+                status, body
+            )));
+        }
+
+        info!(review_id = %review_id, "Review worker started successfully");
+
+        Ok(review_id)
+    }
+
+    /// Get the public URL for R2 (used to construct codebase URLs for the worker).
+    /// This assumes the R2 bucket has public read access configured.
+    fn r2_public_url(&self) -> &str {
+        // The worker needs to be able to fetch the tarball from R2.
+        // This is typically configured via a public bucket URL or CDN.
+        // For now, we'll use the worker base URL as a proxy assumption.
+        // In production, this should be configured separately.
+        &self.worker_base_url
+    }
+}
+
+/// Create a tar.gz archive from a directory
+fn create_tarball(source_dir: &Path) -> Result<Vec<u8>, std::io::Error> {
+    debug!("Creating tarball from {}", source_dir.display());
+
+    let mut buffer = Vec::new();
+
+    {
+        let encoder = GzEncoder::new(&mut buffer, Compression::default());
+        let mut archive = Builder::new(encoder);
+
+        add_directory_to_archive(&mut archive, source_dir, source_dir)?;
+
+        let encoder = archive.into_inner()?;
+        encoder.finish()?;
+    }
+
+    debug!("Created tarball: {} bytes", buffer.len());
+
+    Ok(buffer)
+}
+
+// Recursively add every regular file under `current_dir` to the archive,
+// with paths relative to `base_dir`. Symlinks and special files are skipped.
+// NOTE(review): this includes the .git/ directory of the checkout — confirm
+// the review worker needs it; it inflates the payload and carries repo config.
+fn add_directory_to_archive<W: std::io::Write>(
+    archive: &mut Builder<W>,
+    base_dir: &Path,
+    current_dir: &Path,
+) -> Result<(), std::io::Error> {
+    let entries = std::fs::read_dir(current_dir)?;
+
+    for entry in entries {
+        let entry = entry?;
+        let path = entry.path();
+
+        let relative_path = path.strip_prefix(base_dir).map_err(std::io::Error::other)?;
+
+        let metadata = entry.metadata()?;
+
+        if metadata.is_dir() {
+            // Recursively add directory contents
+            add_directory_to_archive(archive, base_dir, &path)?;
+        } else if metadata.is_file() {
+            // Add file to archive
+            let mut file = File::open(&path)?;
+            archive.append_file(relative_path, &mut file)?;
+        }
+        // Skip symlinks and other special files
+    }
+
+    Ok(())
+}
diff --git a/crates/remote/src/github_app/service.rs b/crates/remote/src/github_app/service.rs
new file mode 100644
index 00000000..2c6a4805
--- /dev/null
+++ b/crates/remote/src/github_app/service.rs
@@ -0,0 +1,387 @@
+use reqwest::Client;
+use secrecy::SecretString;
+use serde::{Deserialize, Serialize};
+use tempfile::TempDir;
+use thiserror::Error;
+use tokio::process::Command;
+use tracing::{debug, info, warn};
+
+use super::jwt::{GitHubAppJwt, JwtError};
+use crate::config::GitHubAppConfig;
+
+const USER_AGENT: &str = "VibeKanbanRemote/1.0";
+const GITHUB_API_BASE: &str = "https://api.github.com";
+
+#[derive(Debug, Error)]
+pub enum GitHubAppError {
+    #[error("JWT error: {0}")]
+    Jwt(#[from] JwtError),
+    
#[error("HTTP request failed: {0}")] + Http(#[from] reqwest::Error), + #[error("GitHub API error: {status} - {message}")] + Api { status: u16, message: String }, + #[error("Installation not found")] + InstallationNotFound, + #[error("Git operation failed: {0}")] + GitOperation(String), +} + +/// Information about a GitHub App installation +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InstallationInfo { + pub id: i64, + pub account: InstallationAccount, + pub repository_selection: String, // "all" or "selected" + pub suspended_at: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InstallationAccount { + pub login: String, + #[serde(rename = "type")] + pub account_type: String, // "Organization" or "User" + pub id: i64, +} + +/// A repository accessible via an installation +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Repository { + pub id: i64, + pub full_name: String, + pub name: String, + pub private: bool, +} + +#[derive(Debug, Deserialize)] +struct InstallationTokenResponse { + token: String, + expires_at: String, +} + +#[derive(Debug, Deserialize)] +struct RepositoriesResponse { + repositories: Vec, +} + +/// Details about a pull request +#[derive(Debug, Clone, Deserialize)] +pub struct PrDetails { + pub title: String, + pub body: Option, + pub head: PrRef, + pub base: PrRef, +} + +/// A git ref (branch/commit) in a PR +#[derive(Debug, Clone, Deserialize)] +pub struct PrRef { + pub sha: String, +} + +/// Service for interacting with the GitHub App API +#[derive(Clone)] +pub struct GitHubAppService { + jwt_generator: GitHubAppJwt, + client: Client, + app_slug: String, + webhook_secret: SecretString, +} + +impl GitHubAppService { + pub fn new(config: &GitHubAppConfig, client: Client) -> Result { + let jwt_generator = GitHubAppJwt::new(config.app_id, config.private_key.clone())?; + + Ok(Self { + jwt_generator, + client, + app_slug: config.app_slug.clone(), + webhook_secret: config.webhook_secret.clone(), + }) + 
} + + /// Get the app slug for constructing installation URLs + pub fn app_slug(&self) -> &str { + &self.app_slug + } + + /// Get the webhook secret for signature verification + pub fn webhook_secret(&self) -> &SecretString { + &self.webhook_secret + } + + /// Get an installation access token for making API calls on behalf of an installation + pub async fn get_installation_token( + &self, + installation_id: i64, + ) -> Result { + let jwt = self.jwt_generator.generate()?; + + let url = format!( + "{}/app/installations/{}/access_tokens", + GITHUB_API_BASE, installation_id + ); + + let response = self + .client + .post(&url) + .header("Authorization", format!("Bearer {}", jwt)) + .header("Accept", "application/vnd.github+json") + .header("User-Agent", USER_AGENT) + .header("X-GitHub-Api-Version", "2022-11-28") + .send() + .await?; + + if !response.status().is_success() { + let status = response.status().as_u16(); + let message = response.text().await.unwrap_or_default(); + warn!( + installation_id, + status, message, "Failed to get installation token" + ); + return Err(GitHubAppError::Api { status, message }); + } + + let token_response: InstallationTokenResponse = response.json().await?; + info!( + installation_id, + expires_at = %token_response.expires_at, + "Got installation access token" + ); + + Ok(token_response.token) + } + + /// Get details about a specific installation + pub async fn get_installation( + &self, + installation_id: i64, + ) -> Result { + let jwt = self.jwt_generator.generate()?; + + let url = format!("{}/app/installations/{}", GITHUB_API_BASE, installation_id); + + let response = self + .client + .get(&url) + .header("Authorization", format!("Bearer {}", jwt)) + .header("Accept", "application/vnd.github+json") + .header("User-Agent", USER_AGENT) + .header("X-GitHub-Api-Version", "2022-11-28") + .send() + .await?; + + if response.status() == reqwest::StatusCode::NOT_FOUND { + return Err(GitHubAppError::InstallationNotFound); + } + + if 
!response.status().is_success() { + let status = response.status().as_u16(); + let message = response.text().await.unwrap_or_default(); + return Err(GitHubAppError::Api { status, message }); + } + + let installation: InstallationInfo = response.json().await?; + Ok(installation) + } + + /// List repositories accessible to an installation + pub async fn list_installation_repos( + &self, + installation_id: i64, + ) -> Result, GitHubAppError> { + let token = self.get_installation_token(installation_id).await?; + + let url = format!("{}/installation/repositories", GITHUB_API_BASE); + + let response = self + .client + .get(&url) + .header("Authorization", format!("Bearer {}", token)) + .header("Accept", "application/vnd.github+json") + .header("User-Agent", USER_AGENT) + .header("X-GitHub-Api-Version", "2022-11-28") + .query(&[("per_page", "100")]) + .send() + .await?; + + if !response.status().is_success() { + let status = response.status().as_u16(); + let message = response.text().await.unwrap_or_default(); + return Err(GitHubAppError::Api { status, message }); + } + + let repos_response: RepositoriesResponse = response.json().await?; + Ok(repos_response.repositories) + } + + /// Post a comment on a pull request + pub async fn post_pr_comment( + &self, + installation_id: i64, + owner: &str, + repo: &str, + pr_number: u64, + body: &str, + ) -> Result<(), GitHubAppError> { + let token = self.get_installation_token(installation_id).await?; + + // Use the issues API to post comments (PRs are issues in GitHub) + let url = format!( + "{}/repos/{}/{}/issues/{}/comments", + GITHUB_API_BASE, owner, repo, pr_number + ); + + let response = self + .client + .post(&url) + .header("Authorization", format!("Bearer {}", token)) + .header("Accept", "application/vnd.github+json") + .header("User-Agent", USER_AGENT) + .header("X-GitHub-Api-Version", "2022-11-28") + .json(&serde_json::json!({ "body": body })) + .send() + .await?; + + if !response.status().is_success() { + let status = 
response.status().as_u16();
+            let message = response.text().await.unwrap_or_default();
+            warn!(
+                owner,
+                repo, pr_number, status, message, "Failed to post PR comment"
+            );
+            return Err(GitHubAppError::Api { status, message });
+        }
+
+        info!(owner, repo, pr_number, "Posted PR comment");
+        Ok(())
+    }
+
+    /// Clone a repository using the installation token for authentication.
+    ///
+    /// Returns a TempDir containing the cloned repository at the specified commit.
+    /// The TempDir will be automatically cleaned up when dropped.
+    /// The credential-bearing remote URL is scrubbed before returning so the
+    /// token never persists in .git/config (the checkout is later archived).
+    pub async fn clone_repo(
+        &self,
+        installation_id: i64,
+        owner: &str,
+        repo: &str,
+        head_sha: &str,
+    ) -> Result<TempDir, GitHubAppError> {
+        let token = self.get_installation_token(installation_id).await?;
+
+        // Create temp directory
+        let temp_dir = tempfile::tempdir()
+            .map_err(|e| GitHubAppError::GitOperation(format!("Failed to create temp dir: {e}")))?;
+
+        let clone_url = format!(
+            "https://x-access-token:{}@github.com/{}/{}.git",
+            token, owner, repo
+        );
+
+        debug!(owner, repo, head_sha, "Cloning repository");
+
+        // Clone the repository
+        let output = Command::new("git")
+            .args(["clone", "--depth", "1", &clone_url, "."])
+            .current_dir(temp_dir.path())
+            .output()
+            .await
+            .map_err(|e| {
+                if e.kind() == std::io::ErrorKind::NotFound {
+                    GitHubAppError::GitOperation("git is not installed or not in PATH".to_string())
+                } else {
+                    GitHubAppError::GitOperation(format!("Failed to run git clone: {e}"))
+                }
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            // Redact the token from error messages
+            let redacted_stderr = stderr.replace(&token, "[REDACTED]");
+            return Err(GitHubAppError::GitOperation(format!(
+                "git clone failed: {redacted_stderr}"
+            )));
+        }
+
+        // Fetch the specific commit (in case it's not in shallow clone)
+        let output = Command::new("git")
+            .args(["fetch", "--depth", "1", "origin", head_sha])
+            .current_dir(temp_dir.path())
+            .output()
+            .await
+            .map_err(|e| {
+                if e.kind() == std::io::ErrorKind::NotFound {
+                    GitHubAppError::GitOperation("git is not installed or not in PATH".to_string())
+                } else {
+                    GitHubAppError::GitOperation(format!("Failed to run git fetch: {e}"))
+                }
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            let redacted_stderr = stderr.replace(&token, "[REDACTED]");
+            return Err(GitHubAppError::GitOperation(format!(
+                "git fetch failed: {redacted_stderr}"
+            )));
+        }
+
+        // Checkout the specific commit
+        let output = Command::new("git")
+            .args(["checkout", head_sha])
+            .current_dir(temp_dir.path())
+            .output()
+            .await
+            .map_err(|e| {
+                if e.kind() == std::io::ErrorKind::NotFound {
+                    GitHubAppError::GitOperation("git is not installed or not in PATH".to_string())
+                } else {
+                    GitHubAppError::GitOperation(format!("Failed to run git checkout: {e}"))
+                }
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            return Err(GitHubAppError::GitOperation(format!(
+                "git checkout failed: {stderr}"
+            )));
+        }
+
+        // SECURITY: the clone URL embeds the installation token, and git stores
+        // it verbatim in .git/config. This checkout is tarballed and uploaded
+        // (see PrReviewService), so scrub the remote URL now that all network
+        // git operations are done.
+        let output = Command::new("git")
+            .args([
+                "remote",
+                "set-url",
+                "origin",
+                &format!("https://github.com/{}/{}.git", owner, repo),
+            ])
+            .current_dir(temp_dir.path())
+            .output()
+            .await
+            .map_err(|e| GitHubAppError::GitOperation(format!("Failed to run git remote: {e}")))?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            let redacted_stderr = stderr.replace(&token, "[REDACTED]");
+            return Err(GitHubAppError::GitOperation(format!(
+                "git remote set-url failed: {redacted_stderr}"
+            )));
+        }
+
+        info!(owner, repo, head_sha, "Repository cloned successfully");
+        Ok(temp_dir)
+    }
+
+    /// Get details about a pull request
+    pub async fn get_pr_details(
+        &self,
+        installation_id: i64,
+        owner: &str,
+        repo: &str,
+        pr_number: u64,
+    ) -> Result<PrDetails, GitHubAppError> {
+        let token = self.get_installation_token(installation_id).await?;
+
+        let url = format!(
+            "{}/repos/{}/{}/pulls/{}",
+            GITHUB_API_BASE, owner, repo, pr_number
+        );
+
+        let response = self
+            .client
+            .get(&url)
+            .header("Authorization", format!("Bearer {}", token))
+            .header("Accept", "application/vnd.github+json")
+            .header("User-Agent", USER_AGENT)
+            .header("X-GitHub-Api-Version", "2022-11-28")
+            .send()
+            .await?;
+
+        if !response.status().is_success() {
+            let status = response.status().as_u16();
+            let message = response.text().await.unwrap_or_default();
+            return Err(GitHubAppError::Api { status, message });
+        }
+
+        let pr: PrDetails = response.json().await?;
+        Ok(pr)
+    }
+}
diff
--git a/crates/remote/src/github_app/webhook.rs b/crates/remote/src/github_app/webhook.rs new file mode 100644 index 00000000..51bfd6f7 --- /dev/null +++ b/crates/remote/src/github_app/webhook.rs @@ -0,0 +1,80 @@ +use hmac::{Hmac, Mac}; +use sha2::Sha256; +use subtle::ConstantTimeEq; + +type HmacSha256 = Hmac; + +/// Verify a GitHub webhook signature. +/// +/// GitHub sends the HMAC-SHA256 signature in the `X-Hub-Signature-256` header +/// in the format `sha256=`. +/// +/// Returns true if the signature is valid. +pub fn verify_webhook_signature(secret: &[u8], signature_header: &str, payload: &[u8]) -> bool { + // Extract the hex signature from the header + let Some(hex_signature) = signature_header.strip_prefix("sha256=") else { + return false; + }; + + // Decode the hex signature + let Ok(expected_signature) = hex::decode(hex_signature) else { + return false; + }; + + // Compute HMAC-SHA256 + let Ok(mut mac) = HmacSha256::new_from_slice(secret) else { + return false; + }; + mac.update(payload); + let computed_signature = mac.finalize().into_bytes(); + + // Constant-time comparison to prevent timing attacks + computed_signature[..].ct_eq(&expected_signature).into() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_valid_signature() { + let secret = b"test-secret"; + let payload = b"test payload"; + + // Compute expected signature + let mut mac = HmacSha256::new_from_slice(secret).unwrap(); + mac.update(payload); + let signature = mac.finalize().into_bytes(); + let signature_header = format!("sha256={}", hex::encode(signature)); + + assert!(verify_webhook_signature(secret, &signature_header, payload)); + } + + #[test] + fn test_invalid_signature() { + let secret = b"test-secret"; + let payload = b"test payload"; + let wrong_signature = + "sha256=0000000000000000000000000000000000000000000000000000000000000000"; + + assert!(!verify_webhook_signature(secret, wrong_signature, payload)); + } + + #[test] + fn test_missing_prefix() { + let secret = 
b"test-secret"; + let payload = b"test payload"; + let no_prefix = "0000000000000000000000000000000000000000000000000000000000000000"; + + assert!(!verify_webhook_signature(secret, no_prefix, payload)); + } + + #[test] + fn test_invalid_hex() { + let secret = b"test-secret"; + let payload = b"test payload"; + let invalid_hex = "sha256=not-valid-hex"; + + assert!(!verify_webhook_signature(secret, invalid_hex, payload)); + } +} diff --git a/crates/remote/src/lib.rs b/crates/remote/src/lib.rs index 23c17839..3a65487d 100644 --- a/crates/remote/src/lib.rs +++ b/crates/remote/src/lib.rs @@ -2,7 +2,9 @@ mod app; mod auth; pub mod config; pub mod db; +pub mod github_app; pub mod mail; +pub mod r2; pub mod routes; mod state; pub mod validated_where; diff --git a/crates/remote/src/mail.rs b/crates/remote/src/mail.rs index f8616dea..0ae806c7 100644 --- a/crates/remote/src/mail.rs +++ b/crates/remote/src/mail.rs @@ -6,6 +6,8 @@ use serde_json::json; use crate::db::organization_members::MemberRole; const LOOPS_INVITE_TEMPLATE_ID: &str = "cmhvy2wgs3s13z70i1pxakij9"; +const LOOPS_REVIEW_READY_TEMPLATE_ID: &str = "cmj47k5ge16990iylued9by17"; +const LOOPS_REVIEW_FAILED_TEMPLATE_ID: &str = "cmj49ougk1c8s0iznavijdqpo"; #[async_trait] pub trait Mailer: Send + Sync { @@ -17,6 +19,10 @@ pub trait Mailer: Send + Sync { role: MemberRole, invited_by: Option<&str>, ); + + async fn send_review_ready(&self, email: &str, review_url: &str, pr_name: &str); + + async fn send_review_failed(&self, email: &str, pr_name: &str, review_id: &str); } pub struct LoopsMailer { @@ -93,4 +99,86 @@ impl Mailer for LoopsMailer { } } } + + async fn send_review_ready(&self, email: &str, review_url: &str, pr_name: &str) { + if cfg!(debug_assertions) { + tracing::info!( + "Sending review ready email to {email}\n\ + PR: {pr_name}\n\ + Review URL: {review_url}" + ); + } + + let payload = json!({ + "transactionalId": LOOPS_REVIEW_READY_TEMPLATE_ID, + "email": email, + "dataVariables": { + "review_url": review_url, + 
"pr_name": pr_name, + } + }); + + let res = self + .client + .post("https://app.loops.so/api/v1/transactional") + .bearer_auth(&self.api_key) + .json(&payload) + .send() + .await; + + match res { + Ok(resp) if resp.status().is_success() => { + tracing::debug!("Review ready email sent via Loops to {email}"); + } + Ok(resp) => { + let status = resp.status(); + let body = resp.text().await.unwrap_or_default(); + tracing::warn!(status = %status, body = %body, "Loops send failed for review ready"); + } + Err(err) => { + tracing::error!(error = ?err, "Loops request error for review ready"); + } + } + } + + async fn send_review_failed(&self, email: &str, pr_name: &str, review_id: &str) { + if cfg!(debug_assertions) { + tracing::info!( + "Sending review failed email to {email}\n\ + PR: {pr_name}\n\ + Review ID: {review_id}" + ); + } + + let payload = json!({ + "transactionalId": LOOPS_REVIEW_FAILED_TEMPLATE_ID, + "email": email, + "dataVariables": { + "pr_name": pr_name, + "review_id": review_id, + } + }); + + let res = self + .client + .post("https://app.loops.so/api/v1/transactional") + .bearer_auth(&self.api_key) + .json(&payload) + .send() + .await; + + match res { + Ok(resp) if resp.status().is_success() => { + tracing::debug!("Review failed email sent via Loops to {email}"); + } + Ok(resp) => { + let status = resp.status(); + let body = resp.text().await.unwrap_or_default(); + tracing::warn!(status = %status, body = %body, "Loops send failed for review failed"); + } + Err(err) => { + tracing::error!(error = ?err, "Loops request error for review failed"); + } + } + } } diff --git a/crates/remote/src/r2.rs b/crates/remote/src/r2.rs new file mode 100644 index 00000000..d40e6674 --- /dev/null +++ b/crates/remote/src/r2.rs @@ -0,0 +1,134 @@ +use std::time::Duration; + +use aws_credential_types::Credentials; +use aws_sdk_s3::{ + Client, + config::{Builder as S3ConfigBuilder, IdentityCache}, + presigning::PresigningConfig, + primitives::ByteStream, +}; +use 
chrono::{DateTime, Utc}; +use secrecy::ExposeSecret; +use uuid::Uuid; + +use crate::config::R2Config; + +/// Well-known filename for the payload tarball stored in each review folder. +pub const PAYLOAD_FILENAME: &str = "payload.tar.gz"; + +#[derive(Clone)] +pub struct R2Service { + client: Client, + bucket: String, + presign_expiry: Duration, +} + +#[derive(Debug)] +pub struct PresignedUpload { + pub upload_url: String, + pub object_key: String, + /// Folder path in R2 (e.g., "reviews/{review_id}") - this is stored in the database. + pub folder_path: String, + pub expires_at: DateTime, +} + +#[derive(Debug, thiserror::Error)] +pub enum R2Error { + #[error("presign config error: {0}")] + PresignConfig(String), + #[error("presign error: {0}")] + Presign(String), + #[error("upload error: {0}")] + Upload(String), +} + +impl R2Service { + pub fn new(config: &R2Config) -> Self { + let credentials = Credentials::new( + &config.access_key_id, + config.secret_access_key.expose_secret(), + None, + None, + "r2-static", + ); + + let s3_config = + S3ConfigBuilder::new() + .region(aws_sdk_s3::config::Region::new("auto")) + .endpoint_url(&config.endpoint) + .credentials_provider(credentials) + .force_path_style(true) + .stalled_stream_protection( + aws_sdk_s3::config::StalledStreamProtectionConfig::disabled(), + ) + .identity_cache(IdentityCache::no_cache()) + .build(); + + let client = Client::from_conf(s3_config); + + Self { + client, + bucket: config.bucket.clone(), + presign_expiry: Duration::from_secs(config.presign_expiry_secs), + } + } + + pub async fn create_presigned_upload( + &self, + review_id: Uuid, + content_type: Option<&str>, + ) -> Result { + let folder_path = format!("reviews/{review_id}"); + let object_key = format!("{folder_path}/{PAYLOAD_FILENAME}"); + + let presigning_config = PresigningConfig::builder() + .expires_in(self.presign_expiry) + .build() + .map_err(|e| R2Error::PresignConfig(e.to_string()))?; + + let mut request = self + .client + .put_object() + 
.bucket(&self.bucket) + .key(&object_key); + + if let Some(ct) = content_type { + request = request.content_type(ct); + } + + let presigned = request + .presigned(presigning_config) + .await + .map_err(|e| R2Error::Presign(e.to_string()))?; + + let expires_at = Utc::now() + + chrono::Duration::from_std(self.presign_expiry).unwrap_or(chrono::Duration::hours(1)); + + Ok(PresignedUpload { + upload_url: presigned.uri().to_string(), + object_key, + folder_path, + expires_at, + }) + } + + /// Upload bytes directly to R2 (for server-side uploads). + /// + /// Returns the folder path (e.g., "reviews/{review_id}") to store in the database. + pub async fn upload_bytes(&self, review_id: Uuid, data: Vec) -> Result { + let folder_path = format!("reviews/{review_id}"); + let object_key = format!("{folder_path}/{PAYLOAD_FILENAME}"); + + self.client + .put_object() + .bucket(&self.bucket) + .key(&object_key) + .body(ByteStream::from(data)) + .content_type("application/gzip") + .send() + .await + .map_err(|e| R2Error::Upload(e.to_string()))?; + + Ok(folder_path) + } +} diff --git a/crates/remote/src/routes/github_app.rs b/crates/remote/src/routes/github_app.rs new file mode 100644 index 00000000..200424d7 --- /dev/null +++ b/crates/remote/src/routes/github_app.rs @@ -0,0 +1,821 @@ +use axum::{ + Json, Router, + body::Bytes, + extract::{Path, Query, State}, + http::{HeaderMap, StatusCode}, + response::{IntoResponse, Redirect, Response}, + routing::{delete, get, post}, +}; +use chrono::{Duration, Utc}; +use secrecy::ExposeSecret; +use serde::{Deserialize, Serialize}; +use tracing::{error, info, warn}; +use uuid::Uuid; + +use super::error::ErrorResponse; +use crate::{ + AppState, + auth::RequestContext, + db::{ + github_app::GitHubAppRepository2, identity_errors::IdentityError, + organizations::OrganizationRepository, + }, + github_app::{PrReviewParams, PrReviewService, verify_webhook_signature}, +}; + +// ========== Public Routes ========== + +pub fn public_router() -> Router { + 
Router::new() + .route("/github/webhook", post(handle_webhook)) + .route("/github/app/callback", get(handle_callback)) +} + +// ========== Protected Routes ========== + +pub fn protected_router() -> Router { + Router::new() + .route( + "/organizations/{org_id}/github-app/install-url", + get(get_install_url), + ) + .route("/organizations/{org_id}/github-app/status", get(get_status)) + .route("/organizations/{org_id}/github-app", delete(uninstall)) + .route("/debug/pr-review/trigger", post(trigger_pr_review)) +} + +// ========== Types ========== + +#[derive(Debug, Serialize)] +pub struct InstallUrlResponse { + pub install_url: String, +} + +#[derive(Debug, Serialize)] +pub struct GitHubAppStatusResponse { + pub installed: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub installation: Option, + pub repositories: Vec, +} + +#[derive(Debug, Serialize)] +pub struct InstallationDetails { + pub id: String, + pub github_installation_id: i64, + pub github_account_login: String, + pub github_account_type: String, + pub repository_selection: String, + pub suspended_at: Option, + pub created_at: String, +} + +#[derive(Debug, Serialize)] +pub struct RepositoryDetails { + pub id: String, + pub github_repo_id: i64, + pub repo_full_name: String, +} + +#[derive(Debug, Deserialize)] +pub struct CallbackQuery { + pub installation_id: Option, + pub state: Option, +} + +#[derive(Debug, Deserialize)] +pub struct TriggerPrReviewRequest { + /// GitHub PR URL, e.g., "https://github.com/owner/repo/pull/123" + pub pr_url: String, +} + +#[derive(Debug, Serialize)] +pub struct TriggerPrReviewResponse { + pub review_id: Uuid, +} + +// ========== Protected Route Handlers ========== + +/// GET /v1/organizations/:org_id/github-app/install-url +/// Returns URL to install the GitHub App for this organization +pub async fn get_install_url( + State(state): State, + axum::extract::Extension(ctx): axum::extract::Extension, + Path(org_id): Path, +) -> Result { + // Check GitHub App is 
configured + let github_app = state.github_app().ok_or_else(|| { + ErrorResponse::new(StatusCode::NOT_IMPLEMENTED, "GitHub App not configured") + })?; + + // Check user is admin of organization + let org_repo = OrganizationRepository::new(state.pool()); + org_repo + .assert_admin(org_id, ctx.user.id) + .await + .map_err(|e| match e { + IdentityError::PermissionDenied => { + ErrorResponse::new(StatusCode::FORBIDDEN, "Admin access required") + } + IdentityError::NotFound => { + ErrorResponse::new(StatusCode::NOT_FOUND, "Organization not found") + } + _ => ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"), + })?; + + // Check not a personal org + let is_personal = org_repo + .is_personal(org_id) + .await + .map_err(|_| ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"))?; + + if is_personal { + return Err(ErrorResponse::new( + StatusCode::BAD_REQUEST, + "GitHub App cannot be installed on personal organizations", + )); + } + + // Generate state token (simple format: org_id:user_id:timestamp) + // In production, you'd want to sign this with HMAC + let expires_at = Utc::now() + Duration::minutes(10); + let state_token = format!("{}:{}:{}", org_id, ctx.user.id, expires_at.timestamp()); + + // Store pending installation + let gh_repo = GitHubAppRepository2::new(state.pool()); + gh_repo + .create_pending(org_id, ctx.user.id, &state_token, expires_at) + .await + .map_err(|e| { + error!(?e, "Failed to create pending installation"); + ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error") + })?; + + // Build installation URL + let install_url = format!( + "https://github.com/apps/{}/installations/new?state={}", + github_app.app_slug(), + urlencoding::encode(&state_token) + ); + + Ok(Json(InstallUrlResponse { install_url })) +} + +/// GET /v1/organizations/:org_id/github-app/status +/// Returns the GitHub App installation status for this organization +pub async fn get_status( + State(state): State, + 
axum::extract::Extension(ctx): axum::extract::Extension, + Path(org_id): Path, +) -> Result { + // Check user is member of organization + let org_repo = OrganizationRepository::new(state.pool()); + org_repo + .assert_membership(org_id, ctx.user.id) + .await + .map_err(|e| match e { + IdentityError::PermissionDenied | IdentityError::NotFound => { + ErrorResponse::new(StatusCode::FORBIDDEN, "Access denied") + } + _ => ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"), + })?; + + let gh_repo = GitHubAppRepository2::new(state.pool()); + + let installation = gh_repo.get_by_organization(org_id).await.map_err(|e| { + error!(?e, "Failed to get GitHub App installation"); + ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error") + })?; + + match installation { + Some(inst) => { + let repositories = gh_repo.get_repositories(inst.id).await.map_err(|e| { + error!(?e, "Failed to get repositories"); + ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error") + })?; + + Ok(Json(GitHubAppStatusResponse { + installed: true, + installation: Some(InstallationDetails { + id: inst.id.to_string(), + github_installation_id: inst.github_installation_id, + github_account_login: inst.github_account_login, + github_account_type: inst.github_account_type, + repository_selection: inst.repository_selection, + suspended_at: inst.suspended_at.map(|t| t.to_rfc3339()), + created_at: inst.created_at.to_rfc3339(), + }), + repositories: repositories + .into_iter() + .map(|r| RepositoryDetails { + id: r.id.to_string(), + github_repo_id: r.github_repo_id, + repo_full_name: r.repo_full_name, + }) + .collect(), + })) + } + None => Ok(Json(GitHubAppStatusResponse { + installed: false, + installation: None, + repositories: vec![], + })), + } +} + +/// DELETE /v1/organizations/:org_id/github-app +/// Removes the local installation record (does not uninstall from GitHub) +pub async fn uninstall( + State(state): State, + axum::extract::Extension(ctx): 
axum::extract::Extension, + Path(org_id): Path, +) -> Result { + // Check user is admin of organization + let org_repo = OrganizationRepository::new(state.pool()); + org_repo + .assert_admin(org_id, ctx.user.id) + .await + .map_err(|e| match e { + IdentityError::PermissionDenied => { + ErrorResponse::new(StatusCode::FORBIDDEN, "Admin access required") + } + IdentityError::NotFound => { + ErrorResponse::new(StatusCode::NOT_FOUND, "Organization not found") + } + _ => ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"), + })?; + + let gh_repo = GitHubAppRepository2::new(state.pool()); + gh_repo.delete_by_organization(org_id).await.map_err(|e| { + error!(?e, "Failed to delete GitHub App installation"); + ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error") + })?; + + info!(org_id = %org_id, user_id = %ctx.user.id, "GitHub App installation removed"); + Ok(StatusCode::NO_CONTENT) +} + +// ========== Public Route Handlers ========== + +/// GET /v1/github/app/callback +/// Handles redirect from GitHub after app installation +pub async fn handle_callback( + State(state): State, + Query(query): Query, +) -> Response { + let frontend_base = state + .config + .server_public_base_url + .clone() + .unwrap_or_else(|| "http://localhost:3000".to_string()); + + // Helper to redirect with error + let redirect_error = |org_id: Option, error: &str| -> Response { + let url = match org_id { + Some(id) => format!( + "{}/account/organizations/{}?github_app_error={}", + frontend_base, + id, + urlencoding::encode(error) + ), + None => format!( + "{}/account?github_app_error={}", + frontend_base, + urlencoding::encode(error) + ), + }; + Redirect::temporary(&url).into_response() + }; + + // Check GitHub App is configured + let Some(github_app) = state.github_app() else { + return redirect_error(None, "GitHub App not configured"); + }; + + // Validate required params + let Some(installation_id) = query.installation_id else { + return redirect_error(None, 
"Missing installation_id"); + }; + + let Some(state_token) = query.state else { + return redirect_error(None, "Missing state parameter"); + }; + + // Parse state token: org_id:user_id:timestamp + let parts: Vec<&str> = state_token.split(':').collect(); + if parts.len() != 3 { + return redirect_error(None, "Invalid state token format"); + } + + let Ok(org_id) = Uuid::parse_str(parts[0]) else { + return redirect_error(None, "Invalid organization ID in state"); + }; + + let Ok(user_id) = Uuid::parse_str(parts[1]) else { + return redirect_error(Some(org_id), "Invalid user ID in state"); + }; + + let Ok(timestamp) = parts[2].parse::() else { + return redirect_error(Some(org_id), "Invalid timestamp in state"); + }; + + // Check expiry + if Utc::now().timestamp() > timestamp { + return redirect_error(Some(org_id), "Installation link expired"); + } + + // Verify pending installation exists + let gh_repo = GitHubAppRepository2::new(state.pool()); + let pending = match gh_repo.get_pending_by_state(&state_token).await { + Ok(Some(p)) => p, + Ok(None) => { + return redirect_error(Some(org_id), "Installation not found or expired"); + } + Err(e) => { + error!(?e, "Failed to get pending installation"); + return redirect_error(Some(org_id), "Database error"); + } + }; + + // Fetch installation details from GitHub + let installation_info = match github_app.get_installation(installation_id).await { + Ok(info) => info, + Err(e) => { + error!(?e, "Failed to get installation from GitHub"); + return redirect_error(Some(org_id), "Failed to verify installation with GitHub"); + } + }; + + // Create installation record + if let Err(e) = gh_repo + .create_installation( + pending.organization_id, + installation_id, + &installation_info.account.login, + &installation_info.account.account_type, + &installation_info.repository_selection, + user_id, + ) + .await + { + error!(?e, "Failed to create installation record"); + return redirect_error(Some(org_id), "Failed to save installation"); + } + + 
// Delete pending record + if let Err(e) = gh_repo.delete_pending(&state_token).await { + warn!(?e, "Failed to delete pending installation record"); + } + + // Fetch and store repositories if selection is "selected" + if installation_info.repository_selection == "selected" + && let Ok(repos) = github_app.list_installation_repos(installation_id).await + { + let installation = gh_repo + .get_by_github_id(installation_id) + .await + .ok() + .flatten(); + if let Some(inst) = installation { + let repo_data: Vec<(i64, String)> = + repos.into_iter().map(|r| (r.id, r.full_name)).collect(); + if let Err(e) = gh_repo.sync_repositories(inst.id, &repo_data).await { + warn!(?e, "Failed to sync repositories"); + } + } + } + + info!( + org_id = %org_id, + installation_id = installation_id, + account = %installation_info.account.login, + "GitHub App installed successfully" + ); + + // Redirect to organization page with success + let url = format!( + "{}/account/organizations/{}?github_app=installed", + frontend_base, org_id + ); + Redirect::temporary(&url).into_response() +} + +/// POST /v1/github/webhook +/// Handles webhook events from GitHub +pub async fn handle_webhook( + State(state): State, + headers: HeaderMap, + body: Bytes, +) -> Response { + // Check GitHub App is configured + let Some(github_app) = state.github_app() else { + warn!("Received webhook but GitHub App not configured"); + return StatusCode::NOT_IMPLEMENTED.into_response(); + }; + + // Verify signature + let signature = headers + .get("X-Hub-Signature-256") + .and_then(|v| v.to_str().ok()) + .unwrap_or(""); + + if !verify_webhook_signature( + github_app.webhook_secret().expose_secret().as_bytes(), + signature, + &body, + ) { + warn!("Invalid webhook signature"); + return StatusCode::UNAUTHORIZED.into_response(); + } + + // Get event type + let event_type = headers + .get("X-GitHub-Event") + .and_then(|v| v.to_str().ok()) + .unwrap_or("unknown"); + + info!(event_type, "Received GitHub webhook"); + + // Parse 
payload + let payload: serde_json::Value = match serde_json::from_slice(&body) { + Ok(v) => v, + Err(e) => { + warn!(?e, "Failed to parse webhook payload"); + return StatusCode::BAD_REQUEST.into_response(); + } + }; + + // Handle different event types + match event_type { + "installation" => handle_installation_event(&state, &payload).await, + "installation_repositories" => handle_installation_repos_event(&state, &payload).await, + "pull_request" => handle_pull_request_event(&state, github_app, &payload).await, + _ => { + info!(event_type, "Ignoring unhandled webhook event"); + StatusCode::OK.into_response() + } + } +} + +// ========== Webhook Event Handlers ========== + +async fn handle_installation_event(state: &AppState, payload: &serde_json::Value) -> Response { + let action = payload["action"].as_str().unwrap_or(""); + let installation_id = payload["installation"]["id"].as_i64().unwrap_or(0); + + info!(action, installation_id, "Processing installation event"); + + let gh_repo = GitHubAppRepository2::new(state.pool()); + + match action { + "deleted" => { + if let Err(e) = gh_repo.delete_by_github_id(installation_id).await { + error!(?e, "Failed to delete installation"); + } else { + info!(installation_id, "Installation deleted"); + } + } + "suspend" => { + if let Err(e) = gh_repo.suspend(installation_id).await { + error!(?e, "Failed to suspend installation"); + } else { + info!(installation_id, "Installation suspended"); + } + } + "unsuspend" => { + if let Err(e) = gh_repo.unsuspend(installation_id).await { + error!(?e, "Failed to unsuspend installation"); + } else { + info!(installation_id, "Installation unsuspended"); + } + } + "created" => { + // Installation created via webhook (without going through our flow) + // This shouldn't happen if orphan installations are rejected + info!( + installation_id, + "Installation created event received (orphan)" + ); + } + _ => { + info!(action, "Ignoring installation action"); + } + } + + StatusCode::OK.into_response() 
+} + +async fn handle_installation_repos_event( + state: &AppState, + payload: &serde_json::Value, +) -> Response { + let action = payload["action"].as_str().unwrap_or(""); + let installation_id = payload["installation"]["id"].as_i64().unwrap_or(0); + + info!( + action, + installation_id, "Processing installation_repositories event" + ); + + let gh_repo = GitHubAppRepository2::new(state.pool()); + + // Get our installation record + let installation = match gh_repo.get_by_github_id(installation_id).await { + Ok(Some(inst)) => inst, + Ok(None) => { + info!(installation_id, "Installation not found, ignoring"); + return StatusCode::OK.into_response(); + } + Err(e) => { + error!(?e, "Failed to get installation"); + return StatusCode::OK.into_response(); + } + }; + + match action { + "added" => { + let repos: Vec<(i64, String)> = payload["repositories_added"] + .as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|r| { + let id = r["id"].as_i64()?; + let name = r["full_name"].as_str()?; + Some((id, name.to_string())) + }) + .collect(); + + if let Err(e) = gh_repo.add_repositories(installation.id, &repos).await { + error!(?e, "Failed to add repositories"); + } else { + info!(installation_id, count = repos.len(), "Repositories added"); + } + } + "removed" => { + let repo_ids: Vec = payload["repositories_removed"] + .as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|r| r["id"].as_i64()) + .collect(); + + if let Err(e) = gh_repo + .remove_repositories(installation.id, &repo_ids) + .await + { + error!(?e, "Failed to remove repositories"); + } else { + info!( + installation_id, + count = repo_ids.len(), + "Repositories removed" + ); + } + } + _ => { + info!(action, "Ignoring repositories action"); + } + } + + // Update repository selection if changed + let new_selection = payload["repository_selection"].as_str().unwrap_or(""); + if !new_selection.is_empty() + && new_selection != installation.repository_selection + && let Err(e) = gh_repo + 
.update_repository_selection(installation_id, new_selection) + .await + { + error!(?e, "Failed to update repository selection"); + } + + StatusCode::OK.into_response() +} + +async fn handle_pull_request_event( + state: &AppState, + github_app: &crate::github_app::GitHubAppService, + payload: &serde_json::Value, +) -> Response { + use crate::github_app::{PrReviewParams, PrReviewService}; + + let action = payload["action"].as_str().unwrap_or(""); + + // Only handle opened PRs + if action != "opened" { + return StatusCode::OK.into_response(); + } + + let installation_id = payload["installation"]["id"].as_i64().unwrap_or(0); + let pr_number = payload["pull_request"]["number"].as_u64().unwrap_or(0); + let repo_owner = payload["repository"]["owner"]["login"] + .as_str() + .unwrap_or(""); + let repo_name = payload["repository"]["name"].as_str().unwrap_or(""); + + info!( + installation_id, + pr_number, repo_owner, repo_name, "Processing pull_request.opened event" + ); + + // Check if we have this installation + let gh_repo = GitHubAppRepository2::new(state.pool()); + let installation = match gh_repo.get_by_github_id(installation_id).await { + Ok(Some(inst)) => inst, + Ok(None) => { + info!(installation_id, "Installation not found, ignoring PR"); + return StatusCode::OK.into_response(); + } + Err(e) => { + error!(?e, "Failed to get installation"); + return StatusCode::OK.into_response(); + } + }; + + // Check if installation is suspended + if installation.suspended_at.is_some() { + info!(installation_id, "Installation is suspended, ignoring PR"); + return StatusCode::OK.into_response(); + } + + // Check if R2 and review worker are configured + let Some(r2) = state.r2() else { + info!("R2 not configured, skipping PR review"); + return StatusCode::OK.into_response(); + }; + + let Some(worker_base_url) = state.config.review_worker_base_url.as_ref() else { + info!("Review worker not configured, skipping PR review"); + return StatusCode::OK.into_response(); + }; + + // Extract 
PR metadata from payload + let pr_title = payload["pull_request"]["title"] + .as_str() + .unwrap_or("Untitled PR") + .to_string(); + let pr_body = payload["pull_request"]["body"] + .as_str() + .unwrap_or("") + .to_string(); + let head_sha = payload["pull_request"]["head"]["sha"] + .as_str() + .unwrap_or("") + .to_string(); + let base_sha = payload["pull_request"]["base"]["sha"] + .as_str() + .unwrap_or("") + .to_string(); + + // Spawn async task to process PR review + let github_app_clone = github_app.clone(); + let r2_clone = r2.clone(); + let http_client = state.http_client.clone(); + let worker_url = worker_base_url.clone(); + let server_url = state.server_public_base_url.clone(); + let pool = state.pool.clone(); + let repo_owner = repo_owner.to_string(); + let repo_name = repo_name.to_string(); + + tokio::spawn(async move { + let service = PrReviewService::new( + github_app_clone, + r2_clone, + http_client, + worker_url, + server_url, + ); + + let params = PrReviewParams { + installation_id, + owner: repo_owner.clone(), + repo: repo_name.clone(), + pr_number, + pr_title, + pr_body, + head_sha, + base_sha, + }; + + if let Err(e) = service.process_pr_review(&pool, params).await { + error!( + ?e, + installation_id, pr_number, repo_owner, repo_name, "Failed to start PR review" + ); + } + }); + + StatusCode::OK.into_response() +} + +// ========== Debug Endpoint ========== + +/// Parse a GitHub PR URL into (owner, repo, pr_number) +fn parse_pr_url(url: &str) -> Option<(String, String, u64)> { + // Parse URLs like: https://github.com/owner/repo/pull/123 + let url = url.trim_end_matches('/'); + let parts: Vec<&str> = url.split('/').collect(); + + // Find "github.com" and get owner/repo/pull/number + let github_idx = parts.iter().position(|&p| p == "github.com")?; + + if parts.len() < github_idx + 5 { + return None; + } + + let owner = parts[github_idx + 1].to_string(); + let repo = parts[github_idx + 2].to_string(); + + if parts[github_idx + 3] != "pull" { + return 
None; + } + + let pr_number: u64 = parts[github_idx + 4].parse().ok()?; + + Some((owner, repo, pr_number)) +} + +/// POST /v1/debug/pr-review/trigger +/// Manually trigger a PR review for debugging purposes +pub async fn trigger_pr_review( + State(state): State, + Json(payload): Json, +) -> Result, ErrorResponse> { + // 1. Parse PR URL + let (owner, repo, pr_number) = parse_pr_url(&payload.pr_url) + .ok_or_else(|| ErrorResponse::new(StatusCode::BAD_REQUEST, "Invalid PR URL format"))?; + + // 2. Validate services are configured + let github_app = state.github_app().ok_or_else(|| { + ErrorResponse::new(StatusCode::SERVICE_UNAVAILABLE, "GitHub App not configured") + })?; + let r2 = state + .r2() + .ok_or_else(|| ErrorResponse::new(StatusCode::SERVICE_UNAVAILABLE, "R2 not configured"))?; + let worker_base_url = state + .config + .review_worker_base_url + .as_ref() + .ok_or_else(|| { + ErrorResponse::new( + StatusCode::SERVICE_UNAVAILABLE, + "Review worker not configured", + ) + })?; + + // 3. Look up installation by owner + let gh_repo = GitHubAppRepository2::new(state.pool()); + let installation = gh_repo + .get_by_account_login(&owner) + .await + .map_err(|e| ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))? + .ok_or_else(|| { + ErrorResponse::new( + StatusCode::NOT_FOUND, + format!("No installation found for {}", owner), + ) + })?; + + // 4. Fetch PR details from GitHub API + let pr_details = github_app + .get_pr_details( + installation.github_installation_id, + &owner, + &repo, + pr_number, + ) + .await + .map_err(|e| ErrorResponse::new(StatusCode::BAD_GATEWAY, e.to_string()))?; + + // 5. 
Create service and process review + let service = PrReviewService::new( + github_app.clone(), + r2.clone(), + state.http_client.clone(), + worker_base_url.clone(), + state.server_public_base_url.clone(), + ); + + let params = PrReviewParams { + installation_id: installation.github_installation_id, + owner, + repo, + pr_number, + pr_title: pr_details.title, + pr_body: pr_details.body.unwrap_or_default(), + head_sha: pr_details.head.sha, + base_sha: pr_details.base.sha, + }; + + let review_id = service + .process_pr_review(state.pool(), params) + .await + .map_err(|e| ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?; + + info!( + review_id = %review_id, + pr_url = %payload.pr_url, + "Manual PR review triggered" + ); + + Ok(Json(TriggerPrReviewResponse { review_id })) +} diff --git a/crates/remote/src/routes/mod.rs b/crates/remote/src/routes/mod.rs index 8ba11007..b56b2e1c 100644 --- a/crates/remote/src/routes/mod.rs +++ b/crates/remote/src/routes/mod.rs @@ -16,11 +16,13 @@ use crate::{AppState, auth::require_session}; mod electric_proxy; mod error; +mod github_app; mod identity; mod oauth; pub(crate) mod organization_members; mod organizations; mod projects; +mod review; pub mod tasks; mod tokens; @@ -49,7 +51,9 @@ pub fn router(state: AppState) -> Router { .route("/health", get(health)) .merge(oauth::public_router()) .merge(organization_members::public_router()) - .merge(tokens::public_router()); + .merge(tokens::public_router()) + .merge(review::public_router()) + .merge(github_app::public_router()); let v1_protected = Router::::new() .merge(identity::router()) @@ -59,6 +63,7 @@ pub fn router(state: AppState) -> Router { .merge(organization_members::protected_router()) .merge(oauth::protected_router()) .merge(electric_proxy::router()) + .merge(github_app::protected_router()) .layer(middleware::from_fn_with_state( state.clone(), require_session, diff --git a/crates/remote/src/routes/review.rs b/crates/remote/src/routes/review.rs new file mode 
100644 index 00000000..ba2d635a --- /dev/null +++ b/crates/remote/src/routes/review.rs @@ -0,0 +1,475 @@ +use std::net::IpAddr; + +use axum::{ + Json, Router, + body::Body, + extract::{Path, State}, + http::{HeaderMap, StatusCode}, + response::{IntoResponse, Response}, + routing::{get, post}, +}; +use chrono::{DateTime, Duration, Utc}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +use crate::{ + AppState, + db::reviews::{CreateReviewParams, ReviewRepository}, + r2::R2Error, +}; + +pub fn public_router() -> Router { + Router::new() + .route("/review/init", post(init_review_upload)) + .route("/review/start", post(start_review)) + .route("/review/{id}/status", get(get_review_status)) + .route("/review/{id}", get(get_review)) + .route("/review/{id}/metadata", get(get_review_metadata)) + .route("/review/{id}/file/{file_hash}", get(get_review_file)) + .route("/review/{id}/diff", get(get_review_diff)) + .route("/review/{id}/success", post(review_success)) + .route("/review/{id}/failed", post(review_failed)) +} + +#[derive(Debug, Deserialize)] +pub struct InitReviewRequest { + pub gh_pr_url: String, + pub email: String, + pub pr_title: String, + #[serde(default)] + pub claude_code_session_id: Option, + #[serde(default)] + pub content_type: Option, +} + +#[derive(Debug, Serialize)] +pub struct InitReviewResponse { + pub review_id: Uuid, + pub upload_url: String, + pub object_key: String, + pub expires_at: DateTime, +} + +#[derive(Debug, Serialize)] +pub struct ReviewMetadataResponse { + pub gh_pr_url: String, + pub pr_title: String, +} + +#[derive(Debug, thiserror::Error)] +pub enum ReviewError { + #[error("R2 storage not configured")] + NotConfigured, + #[error("failed to generate upload URL: {0}")] + R2Error(#[from] R2Error), + #[error("rate limit exceeded")] + RateLimited, + #[error("unable to determine client IP")] + MissingClientIp, + #[error("database error: {0}")] + Database(#[from] crate::db::reviews::ReviewError), + #[error("review worker not 
configured")] + WorkerNotConfigured, + #[error("review worker request failed: {0}")] + WorkerError(#[from] reqwest::Error), + #[error("invalid review ID")] + InvalidReviewId, +} + +impl IntoResponse for ReviewError { + fn into_response(self) -> Response { + let (status, message) = match &self { + ReviewError::NotConfigured => ( + StatusCode::SERVICE_UNAVAILABLE, + "Review upload service not available", + ), + ReviewError::R2Error(e) => { + tracing::error!(error = %e, "R2 presign failed"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + "Failed to generate upload URL", + ) + } + ReviewError::RateLimited => ( + StatusCode::TOO_MANY_REQUESTS, + "Rate limit exceeded. Try again later.", + ), + ReviewError::MissingClientIp => { + (StatusCode::BAD_REQUEST, "Unable to determine client IP") + } + ReviewError::Database(crate::db::reviews::ReviewError::NotFound) => { + (StatusCode::NOT_FOUND, "Review not found") + } + ReviewError::Database(e) => { + tracing::error!(error = %e, "Database error in review"); + (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error") + } + ReviewError::WorkerNotConfigured => ( + StatusCode::SERVICE_UNAVAILABLE, + "Review worker service not available", + ), + ReviewError::WorkerError(e) => { + tracing::error!(error = %e, "Review worker request failed"); + ( + StatusCode::BAD_GATEWAY, + "Failed to fetch review from worker", + ) + } + ReviewError::InvalidReviewId => (StatusCode::BAD_REQUEST, "Invalid review ID"), + }; + + let body = serde_json::json!({ + "error": message + }); + + (status, Json(body)).into_response() + } +} + +/// Extract client IP from headers, with fallbacks for local development +fn extract_client_ip(headers: &HeaderMap) -> Option { + // Try Cloudflare header first (production) + if let Some(ip) = headers + .get("CF-Connecting-IP") + .and_then(|v| v.to_str().ok()) + .and_then(|s| s.parse().ok()) + { + return Some(ip); + } + + // Fallback to X-Forwarded-For (common proxy header) + if let Some(ip) = headers + .get("X-Forwarded-For") 
+ .and_then(|v| v.to_str().ok()) + .and_then(|s| s.split(',').next()) // Take first IP in chain + .and_then(|s| s.trim().parse().ok()) + { + return Some(ip); + } + + // Fallback to X-Real-IP + if let Some(ip) = headers + .get("X-Real-IP") + .and_then(|v| v.to_str().ok()) + .and_then(|s| s.parse().ok()) + { + return Some(ip); + } + + // For local development, use localhost + Some(IpAddr::V4(std::net::Ipv4Addr::LOCALHOST)) +} + +/// Check rate limits for the given IP address. +/// Limits: 2 reviews per minute, 20 reviews per hour. +async fn check_rate_limit(repo: &ReviewRepository<'_>, ip: IpAddr) -> Result<(), ReviewError> { + let now = Utc::now(); + + // Check minute limit (2 per minute) + let minute_ago = now - Duration::minutes(1); + let minute_count = repo.count_since(ip, minute_ago).await?; + if minute_count >= 2 { + return Err(ReviewError::RateLimited); + } + + // Check hour limit (20 per hour) + let hour_ago = now - Duration::hours(1); + let hour_count = repo.count_since(ip, hour_ago).await?; + if hour_count >= 20 { + return Err(ReviewError::RateLimited); + } + + Ok(()) +} + +pub async fn init_review_upload( + State(state): State, + headers: HeaderMap, + Json(payload): Json, +) -> Result, ReviewError> { + // 1. Generate the review ID upfront (used in both R2 path and DB record) + let review_id = Uuid::new_v4(); + + // 2. Extract IP (required for rate limiting) + let ip = extract_client_ip(&headers).ok_or(ReviewError::MissingClientIp)?; + + // 3. Check rate limits + let repo = ReviewRepository::new(state.pool()); + check_rate_limit(&repo, ip).await?; + + // 4. Get R2 service + let r2 = state.r2().ok_or(ReviewError::NotConfigured)?; + + // 5. Generate presigned URL with review ID in path + let content_type = payload.content_type.as_deref(); + let upload = r2.create_presigned_upload(review_id, content_type).await?; + + // 6. 
Insert DB record with the same review ID, storing folder path + let review = repo + .create(CreateReviewParams { + id: review_id, + gh_pr_url: &payload.gh_pr_url, + claude_code_session_id: payload.claude_code_session_id.as_deref(), + ip_address: ip, + r2_path: &upload.folder_path, + email: &payload.email, + pr_title: &payload.pr_title, + }) + .await?; + + // 7. Return response with review_id + Ok(Json(InitReviewResponse { + review_id: review.id, + upload_url: upload.upload_url, + object_key: upload.object_key, + expires_at: upload.expires_at, + })) +} + +/// Proxy a request to the review worker and return the response. +async fn proxy_to_worker(state: &AppState, path: &str) -> Result { + let base_url = state + .config + .review_worker_base_url + .as_ref() + .ok_or(ReviewError::WorkerNotConfigured)?; + + let url = format!("{}{}", base_url.trim_end_matches('/'), path); + + let response = state.http_client.get(&url).send().await?; + + let status = response.status(); + let headers = response.headers().clone(); + let bytes = response.bytes().await?; + + let mut builder = Response::builder().status(status); + + // Copy relevant headers from the worker response + if let Some(content_type) = headers.get("content-type") { + builder = builder.header("content-type", content_type); + } + + Ok(builder.body(Body::from(bytes)).unwrap()) +} + +/// Proxy a POST request with JSON body to the review worker +async fn proxy_post_to_worker( + state: &AppState, + path: &str, + body: serde_json::Value, +) -> Result { + let base_url = state + .config + .review_worker_base_url + .as_ref() + .ok_or(ReviewError::WorkerNotConfigured)?; + + let url = format!("{}{}", base_url.trim_end_matches('/'), path); + + let response = state.http_client.post(&url).json(&body).send().await?; + + let status = response.status(); + let headers = response.headers().clone(); + let bytes = response.bytes().await?; + + let mut builder = Response::builder().status(status); + + if let Some(content_type) = 
headers.get("content-type") { + builder = builder.header("content-type", content_type); + } + + Ok(builder.body(Body::from(bytes)).unwrap()) +} + +/// POST /review/start - Start review processing on worker +pub async fn start_review( + State(state): State, + Json(body): Json, +) -> Result { + proxy_post_to_worker(&state, "/review/start", body).await +} + +/// GET /review/:id/status - Get review status from worker +pub async fn get_review_status( + State(state): State, + Path(id): Path, +) -> Result { + let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?; + + // Verify review exists in our database + let repo = ReviewRepository::new(state.pool()); + let _review = repo.get_by_id(review_id).await?; + + // Proxy to worker + proxy_to_worker(&state, &format!("/review/{}/status", review_id)).await +} + +/// GET /review/:id/metadata - Get PR metadata from database +pub async fn get_review_metadata( + State(state): State, + Path(id): Path, +) -> Result, ReviewError> { + let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?; + + let repo = ReviewRepository::new(state.pool()); + let review = repo.get_by_id(review_id).await?; + + Ok(Json(ReviewMetadataResponse { + gh_pr_url: review.gh_pr_url, + pr_title: review.pr_title, + })) +} + +/// GET /review/:id - Get complete review result from worker +pub async fn get_review( + State(state): State, + Path(id): Path, +) -> Result { + let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?; + + // Verify review exists in our database + let repo = ReviewRepository::new(state.pool()); + let _review = repo.get_by_id(review_id).await?; + + // Proxy to worker + proxy_to_worker(&state, &format!("/review/{}", review_id)).await +} + +/// GET /review/:id/file/:file_hash - Get file content from worker +pub async fn get_review_file( + State(state): State, + Path((id, file_hash)): Path<(String, String)>, +) -> Result { + let review_id: Uuid = id.parse().map_err(|_| 
ReviewError::InvalidReviewId)?; + + // Verify review exists in our database + let repo = ReviewRepository::new(state.pool()); + let _review = repo.get_by_id(review_id).await?; + + // Proxy to worker + proxy_to_worker(&state, &format!("/review/{}/file/{}", review_id, file_hash)).await +} + +/// GET /review/:id/diff - Get diff for review from worker +pub async fn get_review_diff( + State(state): State, + Path(id): Path, +) -> Result { + let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?; + + // Verify review exists in our database + let repo = ReviewRepository::new(state.pool()); + let _review = repo.get_by_id(review_id).await?; + + // Proxy to worker + proxy_to_worker(&state, &format!("/review/{}/diff", review_id)).await +} + +/// POST /review/:id/success - Called by worker when review completes successfully +/// Sends success notification email to the user, or posts PR comment for webhook reviews +pub async fn review_success( + State(state): State, + Path(id): Path, +) -> Result { + let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?; + + // Fetch review from database to get email and PR title + let repo = ReviewRepository::new(state.pool()); + let review = repo.get_by_id(review_id).await?; + + // Mark review as completed + repo.mark_completed(review_id).await?; + + // Build review URL + let review_url = format!("{}/review/{}", state.server_public_base_url, review_id); + + // Check if this is a webhook-triggered review + if review.is_webhook_review() { + // Post PR comment instead of sending email + if let Some(github_app) = state.github_app() { + let comment = format!( + "## Vibe Kanban Review Complete\n\n\ + Your code review is ready!\n\n\ + **[View Review]({})**", + review_url + ); + + let installation_id = review.github_installation_id.unwrap_or(0); + let pr_owner = review.pr_owner.as_deref().unwrap_or(""); + let pr_repo = review.pr_repo.as_deref().unwrap_or(""); + let pr_number = review.pr_number.unwrap_or(0) as 
u64; + + if let Err(e) = github_app + .post_pr_comment(installation_id, pr_owner, pr_repo, pr_number, &comment) + .await + { + tracing::error!( + ?e, + review_id = %review_id, + "Failed to post success comment to PR" + ); + } + } + } else if let Some(email) = &review.email { + // CLI review - send email notification + state + .mailer + .send_review_ready(email, &review_url, &review.pr_title) + .await; + } + + Ok(StatusCode::OK) +} + +/// POST /review/:id/failed - Called by worker when review fails +/// Sends failure notification email to the user, or posts PR comment for webhook reviews +pub async fn review_failed( + State(state): State, + Path(id): Path, +) -> Result { + let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?; + + // Fetch review from database to get email and PR title + let repo = ReviewRepository::new(state.pool()); + let review = repo.get_by_id(review_id).await?; + + // Mark review as failed + repo.mark_failed(review_id).await?; + + // Check if this is a webhook-triggered review + if review.is_webhook_review() { + // Post PR comment instead of sending email + if let Some(github_app) = state.github_app() { + let comment = format!( + "## Vibe Kanban Review Failed\n\n\ + Unfortunately, the code review could not be completed.\n\n\ + Review ID: `{}`", + review_id + ); + + let installation_id = review.github_installation_id.unwrap_or(0); + let pr_owner = review.pr_owner.as_deref().unwrap_or(""); + let pr_repo = review.pr_repo.as_deref().unwrap_or(""); + let pr_number = review.pr_number.unwrap_or(0) as u64; + + if let Err(e) = github_app + .post_pr_comment(installation_id, pr_owner, pr_repo, pr_number, &comment) + .await + { + tracing::error!( + ?e, + review_id = %review_id, + "Failed to post failure comment to PR" + ); + } + } + } else if let Some(email) = &review.email { + // CLI review - send email notification + state + .mailer + .send_review_failed(email, &review.pr_title, &review_id.to_string()) + .await; + } + + 
Ok(StatusCode::OK) +} diff --git a/crates/remote/src/state.rs b/crates/remote/src/state.rs index 516e5f2e..45f01b89 100644 --- a/crates/remote/src/state.rs +++ b/crates/remote/src/state.rs @@ -5,7 +5,9 @@ use sqlx::PgPool; use crate::{ auth::{JwtService, OAuthHandoffService, OAuthTokenValidator, ProviderRegistry}, config::RemoteServerConfig, + github_app::GitHubAppService, mail::Mailer, + r2::R2Service, }; #[derive(Clone)] @@ -18,6 +20,8 @@ pub struct AppState { pub http_client: reqwest::Client, handoff: Arc, oauth_token_validator: Arc, + r2: Option, + github_app: Option>, } impl AppState { @@ -31,6 +35,8 @@ impl AppState { mailer: Arc, server_public_base_url: String, http_client: reqwest::Client, + r2: Option, + github_app: Option>, ) -> Self { Self { pool, @@ -41,6 +47,8 @@ impl AppState { http_client, handoff, oauth_token_validator, + r2, + github_app, } } @@ -67,4 +75,12 @@ impl AppState { pub fn oauth_token_validator(&self) -> Arc { Arc::clone(&self.oauth_token_validator) } + + pub fn r2(&self) -> Option<&R2Service> { + self.r2.as_ref() + } + + pub fn github_app(&self) -> Option<&GitHubAppService> { + self.github_app.as_deref() + } } diff --git a/crates/review/Cargo.toml b/crates/review/Cargo.toml new file mode 100644 index 00000000..ef0e1b84 --- /dev/null +++ b/crates/review/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "review" +version = "0.0.134" +edition = "2024" +publish = false + +[[bin]] +name = "review" +path = "src/main.rs" + +[dependencies] +clap = { version = "4", features = ["derive", "env"] } +tokio = { workspace = true } +reqwest = { version = "0.12", features = ["json", "stream"] } +serde = { workspace = true } +serde_json = { workspace = true } +tar = "0.4" +flate2 = "1.0" +indicatif = "0.17" +anyhow = { workspace = true } +thiserror = { workspace = true } +uuid = { version = "1.0", features = ["v4", "serde"] } +chrono = { version = "0.4", features = ["serde"] } +tempfile = "3.8" +tracing = { workspace = true } +tracing-subscriber = { 
workspace = true } +dialoguer = "0.11" +dirs = "5.0" +toml = "0.8" diff --git a/crates/review/src/api.rs b/crates/review/src/api.rs new file mode 100644 index 00000000..1b3c1b5e --- /dev/null +++ b/crates/review/src/api.rs @@ -0,0 +1,208 @@ +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use tracing::debug; +use uuid::Uuid; + +use crate::error::ReviewError; + +/// API client for the review service +pub struct ReviewApiClient { + client: Client, + base_url: String, +} + +/// Response from POST /review/init +#[derive(Debug, Deserialize)] +pub struct InitResponse { + pub review_id: Uuid, + pub upload_url: String, + pub object_key: String, +} + +/// Request body for POST /review/init +#[derive(Debug, Serialize)] +struct InitRequest { + gh_pr_url: String, + email: String, + pr_title: String, +} + +/// Request body for POST /review/start +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StartRequest { + pub id: String, + pub title: String, + pub description: String, + pub org: String, + pub repo: String, + pub codebase_url: String, + pub base_commit: String, +} + +/// Response from GET /review/{id}/status +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct StatusResponse { + pub status: ReviewStatus, + pub progress: Option, + pub error: Option, +} + +/// Possible review statuses +#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ReviewStatus { + Queued, + Extracting, + Running, + Completed, + Failed, +} + +impl std::fmt::Display for ReviewStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ReviewStatus::Queued => write!(f, "queued"), + ReviewStatus::Extracting => write!(f, "extracting"), + ReviewStatus::Running => write!(f, "running"), + ReviewStatus::Completed => write!(f, "completed"), + ReviewStatus::Failed => write!(f, "failed"), + } + } +} + +impl ReviewApiClient { + /// Create a new API client + pub fn 
new(base_url: String) -> Self { + Self { + client: Client::new(), + base_url, + } + } + + /// Initialize a review upload and get a presigned URL + pub async fn init( + &self, + pr_url: &str, + email: &str, + pr_title: &str, + ) -> Result { + let url = format!("{}/v1/review/init", self.base_url); + debug!("POST {url}"); + + let response = self + .client + .post(&url) + .json(&InitRequest { + gh_pr_url: pr_url.to_string(), + email: email.to_string(), + pr_title: pr_title.to_string(), + }) + .send() + .await + .map_err(|e| ReviewError::ApiError(e.to_string()))?; + + if !response.status().is_success() { + let status = response.status(); + let body = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + return Err(ReviewError::ApiError(format!("{status}: {body}"))); + } + + let init_response: InitResponse = response + .json() + .await + .map_err(|e| ReviewError::ApiError(e.to_string()))?; + + debug!("Review ID: {}", init_response.review_id); + + Ok(init_response) + } + + /// Upload the tarball to the presigned URL + pub async fn upload(&self, upload_url: &str, payload: Vec) -> Result<(), ReviewError> { + debug!("PUT {} ({} bytes)", upload_url, payload.len()); + + let response = self + .client + .put(upload_url) + .header("Content-Type", "application/gzip") + .body(payload) + .send() + .await + .map_err(|e| ReviewError::UploadFailed(e.to_string()))?; + + if !response.status().is_success() { + let status = response.status(); + let body = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + return Err(ReviewError::UploadFailed(format!("{status}: {body}"))); + } + + Ok(()) + } + + /// Start the review process + pub async fn start(&self, request: StartRequest) -> Result<(), ReviewError> { + let url = format!("{}/v1/review/start", self.base_url); + debug!("POST {url}"); + + let response = self + .client + .post(&url) + .json(&request) + .send() + .await + .map_err(|e| ReviewError::ApiError(e.to_string()))?; + + if 
!response.status().is_success() { + let status = response.status(); + let body = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + return Err(ReviewError::ApiError(format!("{status}: {body}"))); + } + + Ok(()) + } + + /// Poll the review status + pub async fn poll_status(&self, review_id: &str) -> Result { + let url = format!("{}/v1/review/{}/status", self.base_url, review_id); + debug!("GET {url}"); + + let response = self + .client + .get(&url) + .send() + .await + .map_err(|e| ReviewError::ApiError(e.to_string()))?; + + if !response.status().is_success() { + let status = response.status(); + let body = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + return Err(ReviewError::ApiError(format!("{status}: {body}"))); + } + + let status_response: StatusResponse = response + .json() + .await + .map_err(|e| ReviewError::ApiError(e.to_string()))?; + + Ok(status_response) + } + + /// Get the review URL for a given review ID + pub fn review_url(&self, review_id: &str) -> String { + format!("{}/review/{}", self.base_url, review_id) + } +} diff --git a/crates/review/src/archive.rs b/crates/review/src/archive.rs new file mode 100644 index 00000000..0fa60c71 --- /dev/null +++ b/crates/review/src/archive.rs @@ -0,0 +1,106 @@ +use std::{fs::File, path::Path}; + +use flate2::{Compression, write::GzEncoder}; +use tar::Builder; +use tracing::debug; + +use crate::error::ReviewError; + +/// Create a tar.gz archive from a directory +pub fn create_tarball(source_dir: &Path) -> Result, ReviewError> { + debug!("Creating tarball from {}", source_dir.display()); + + let mut buffer = Vec::new(); + + { + let encoder = GzEncoder::new(&mut buffer, Compression::default()); + let mut archive = Builder::new(encoder); + + add_directory_to_archive(&mut archive, source_dir, source_dir)?; + + let encoder = archive + .into_inner() + .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + encoder + .finish() + .map_err(|e| 
ReviewError::ArchiveFailed(e.to_string()))?; + } + + debug!("Created tarball: {} bytes", buffer.len()); + + Ok(buffer) +} + +fn add_directory_to_archive( + archive: &mut Builder, + base_dir: &Path, + current_dir: &Path, +) -> Result<(), ReviewError> { + let entries = + std::fs::read_dir(current_dir).map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + + for entry in entries { + let entry = entry.map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + let path = entry.path(); + + let relative_path = path + .strip_prefix(base_dir) + .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + + let metadata = entry + .metadata() + .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + + if metadata.is_dir() { + // Recursively add directory contents + add_directory_to_archive(archive, base_dir, &path)?; + } else if metadata.is_file() { + // Add file to archive + let mut file = + File::open(&path).map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + archive + .append_file(relative_path, &mut file) + .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + } + // Skip symlinks and other special files + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + + use tempfile::TempDir; + + use super::*; + + #[test] + fn test_create_tarball() { + let temp_dir = TempDir::new().unwrap(); + let base = temp_dir.path(); + + // Create some test files + std::fs::write(base.join("file1.txt"), "content1").unwrap(); + std::fs::create_dir(base.join("subdir")).unwrap(); + std::fs::write(base.join("subdir/file2.txt"), "content2").unwrap(); + + let tarball = create_tarball(base).expect("Should create tarball"); + + // Verify tarball is not empty + assert!(!tarball.is_empty()); + + // Decompress and verify contents + let decoder = flate2::read::GzDecoder::new(&tarball[..]); + let mut archive = tar::Archive::new(decoder); + + let entries: Vec<_> = archive + .entries() + .unwrap() + .map(|e| e.unwrap().path().unwrap().to_string_lossy().to_string()) + .collect(); + + 
assert!(entries.contains(&"file1.txt".to_string())); + assert!(entries.contains(&"subdir/file2.txt".to_string())); + } +} diff --git a/crates/review/src/claude_session.rs b/crates/review/src/claude_session.rs new file mode 100644 index 00000000..36a269ec --- /dev/null +++ b/crates/review/src/claude_session.rs @@ -0,0 +1,513 @@ +use std::{ + fs::{self, File}, + io::{BufRead, BufReader}, + path::{Path, PathBuf}, + time::SystemTime, +}; + +use serde::Deserialize; +use tracing::debug; + +use crate::error::ReviewError; + +/// Represents a Claude Code project directory +#[derive(Debug, Clone)] +pub struct ClaudeProject { + pub path: PathBuf, + pub name: String, + pub git_branch: Option, + pub first_prompt: Option, + pub session_count: usize, + pub modified_at: SystemTime, +} + +/// Represents a single session file within a project +#[derive(Debug, Clone)] +pub struct ClaudeSession { + pub path: PathBuf, + pub git_branch: Option, + pub first_prompt: Option, + pub modified_at: SystemTime, +} + +/// A JSONL record for metadata extraction +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct JsonlRecord { + git_branch: Option, + message: Option, +} + +/// Message within a JSONL record +#[derive(Debug, Deserialize)] +struct JsonlMessage { + role: Option, + content: Option, +} + +/// Get the Claude projects directory path (~/.claude/projects) +pub fn get_claude_projects_dir() -> Option { + dirs::home_dir().map(|home| home.join(".claude").join("projects")) +} + +/// Discover all Claude projects, sorted by modification time (most recent first) +/// Aggregates session metadata (git_branch, first_prompt, session_count) from each project's sessions +pub fn discover_projects() -> Result, ReviewError> { + let projects_dir = get_claude_projects_dir().ok_or_else(|| { + ReviewError::SessionDiscoveryFailed("Could not find home directory".into()) + })?; + + if !projects_dir.exists() { + debug!( + "Claude projects directory does not exist: {:?}", + projects_dir + ); + 
return Ok(Vec::new()); + } + + let mut projects = Vec::new(); + + let entries = fs::read_dir(&projects_dir) + .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + + for entry in entries { + let entry = entry.map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + let path = entry.path(); + + if !path.is_dir() { + continue; + } + + let metadata = entry + .metadata() + .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + + let modified_at = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH); + + // Extract a friendly name from the directory name + // e.g., "-private-var-...-worktrees-a04a-store-payloads-i" -> "store-payloads-i" + let dir_name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown"); + + let name = extract_project_name(dir_name); + + // Discover sessions to get aggregated metadata + let sessions = discover_sessions_in_dir(&path)?; + let session_count = sessions.len(); + + // Skip projects with no sessions + if session_count == 0 { + continue; + } + + // Get metadata from the most recent session + let most_recent = &sessions[0]; // Already sorted by modification time + let git_branch = most_recent.git_branch.clone(); + let first_prompt = most_recent.first_prompt.clone(); + + projects.push(ClaudeProject { + path, + name, + git_branch, + first_prompt, + session_count, + modified_at, + }); + } + + // Sort by modification time, most recent first + projects.sort_by(|a, b| b.modified_at.cmp(&a.modified_at)); + + Ok(projects) +} + +/// Extract a friendly project name from the Claude directory name +fn extract_project_name(dir_name: &str) -> String { + // Directory names look like: + // "-private-var-folders-m1-9q-ct1913z10v6wbnv54j25r0000gn-T-vibe-kanban-worktrees-a04a-store-payloads-i" + // We want to extract the meaningful part after "worktrees-" + if let Some(idx) = dir_name.find("worktrees-") { + let after_worktrees = &dir_name[idx + "worktrees-".len()..]; + // Skip the short hash prefix 
(e.g., "a04a-") + if let Some(dash_idx) = after_worktrees.find('-') { + return after_worktrees[dash_idx + 1..].to_string(); + } + return after_worktrees.to_string(); + } + + // Fallback: use last segment after the final dash + dir_name.rsplit('-').next().unwrap_or(dir_name).to_string() +} + +/// Discover sessions in a project, excluding agent-* files +pub fn discover_sessions(project: &ClaudeProject) -> Result, ReviewError> { + discover_sessions_in_dir(&project.path) +} + +/// Discover sessions in a directory, excluding agent-* files +fn discover_sessions_in_dir(dir_path: &Path) -> Result, ReviewError> { + let mut sessions = Vec::new(); + + let entries = + fs::read_dir(dir_path).map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + + for entry in entries { + let entry = entry.map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + let path = entry.path(); + + // Only process .jsonl files + if path.extension().and_then(|e| e.to_str()) != Some("jsonl") { + continue; + } + + let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); + + // Skip agent-* files + if file_name.starts_with("agent-") { + continue; + } + + let metadata = entry + .metadata() + .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + + let modified_at = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH); + + // Extract metadata from the JSONL file + let (git_branch, first_prompt) = extract_session_metadata(&path); + + sessions.push(ClaudeSession { + path, + git_branch, + first_prompt, + modified_at, + }); + } + + // Sort by modification time, most recent first + sessions.sort_by(|a, b| b.modified_at.cmp(&a.modified_at)); + + Ok(sessions) +} + +/// Extract session metadata from a JSONL file +/// Returns: (git_branch, first_prompt) +fn extract_session_metadata(path: &Path) -> (Option, Option) { + let file = match File::open(path) { + Ok(f) => f, + Err(_) => return (None, None), + }; + let reader = BufReader::new(file); + + let mut 
git_branch: Option = None; + let mut first_prompt: Option = None; + + // Check first 50 lines for metadata + for line in reader.lines().take(50) { + let line = match line { + Ok(l) => l, + Err(_) => continue, + }; + if line.trim().is_empty() { + continue; + } + + if let Ok(record) = serde_json::from_str::(&line) { + // Extract git branch if not already found + if git_branch.is_none() && record.git_branch.is_some() { + git_branch = record.git_branch; + } + + // Extract first user prompt if not already found + if first_prompt.is_none() + && let Some(ref message) = record.message + && message.role.as_deref() == Some("user") + && let Some(ref content) = message.content + { + // Content can be a string or an array + if let Some(text) = content.as_str() { + first_prompt = Some(truncate_string(text, 60)); + } + } + + // Stop early if we have both + if git_branch.is_some() && first_prompt.is_some() { + break; + } + } + } + + (git_branch, first_prompt) +} + +/// Truncate a string to max length, adding "..." 
/// Truncate a string to at most `max_len` characters, appending "..." when
/// truncated. Newlines are replaced with spaces for single-line display.
///
/// Counts characters and cuts on a char boundary: the previous byte-index
/// slice `&s[..max_len - 3]` panicked when the cut landed inside a
/// multi-byte UTF-8 sequence (e.g. a prompt containing emoji or accented
/// text), and underflowed for `max_len < 3`.
fn truncate_string(s: &str, max_len: usize) -> String {
    // Replace newlines with spaces for display
    let s = s.replace('\n', " ");
    if s.chars().count() <= max_len {
        return s;
    }
    let keep = max_len.saturating_sub(3);
    let truncated: String = s.chars().take(keep).collect();
    format!("{truncated}...")
}

/// Check if two branch names match using fuzzy matching.
///
/// Branches match when they are equal after normalization, or when their
/// final path segments ("slugs") are equal — so "feature-auth" matches
/// "vk/feature-auth" but not "auth".
fn branches_match(target: &str, session_branch: &str) -> bool {
    let target_normalized = normalize_branch(target);
    let session_normalized = normalize_branch(session_branch);

    // Exact match after normalization
    if target_normalized == session_normalized {
        return true;
    }

    // Check if the slug portions match (e.g., "feature-auth" matches "vk/feature-auth")
    let target_slug = extract_branch_slug(&target_normalized);
    let session_slug = extract_branch_slug(&session_normalized);

    target_slug == session_slug && !target_slug.is_empty()
}

/// Normalize a branch name: strip the "refs/heads/" prefix and lowercase.
fn normalize_branch(branch: &str) -> String {
    let branch = branch.strip_prefix("refs/heads/").unwrap_or(branch);

    branch.to_lowercase()
}

/// Extract the "slug" portion of a branch name — everything after the final
/// '/': e.g. "vk/a04a-store-payloads-i" -> "a04a-store-payloads-i".
fn extract_branch_slug(branch: &str) -> String {
    branch.rsplit('/').next().unwrap_or(branch).to_string()
}
branch.rsplit('/').next().unwrap_or(branch).to_string() +} + +/// A record with timestamp for sorting +struct TimestampedMessage { + timestamp: String, + message: serde_json::Value, +} + +/// Concatenate multiple JSONL files into a single JSON array of messages. +/// +/// Filters to include only: +/// - User messages (role = "user") +/// - Assistant messages with text content (role = "assistant" with content[].type = "text") +/// +/// For assistant messages, only text content blocks are kept (tool_use, etc. are filtered out). +pub fn concatenate_sessions_to_json(session_paths: &[PathBuf]) -> Result { + let mut all_messages: Vec = Vec::new(); + + for path in session_paths { + let file = File::open(path) + .map_err(|e| ReviewError::JsonlParseFailed(format!("{}: {}", path.display(), e)))?; + let reader = BufReader::new(file); + + for (line_num, line) in reader.lines().enumerate() { + let line = line.map_err(|e| { + ReviewError::JsonlParseFailed(format!("{}:{}: {}", path.display(), line_num + 1, e)) + })?; + + if line.trim().is_empty() { + continue; + } + + let record: serde_json::Value = serde_json::from_str(&line).map_err(|e| { + ReviewError::JsonlParseFailed(format!("{}:{}: {}", path.display(), line_num + 1, e)) + })?; + + // Extract timestamp for sorting + let timestamp = record + .get("timestamp") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + + // Extract and filter the message + if let Some(message) = extract_filtered_message(&record) { + all_messages.push(TimestampedMessage { timestamp, message }); + } + } + } + + // Sort by timestamp + all_messages.sort_by(|a, b| a.timestamp.cmp(&b.timestamp)); + + // Extract just the messages + let messages: Vec = all_messages.into_iter().map(|m| m.message).collect(); + + serde_json::to_string(&messages).map_err(|e| ReviewError::JsonlParseFailed(e.to_string())) +} + +/// Extract and filter a message from a JSONL record. +/// +/// Returns Some(message) if the record should be included, None otherwise. 
+/// - User messages: include if content is a string, or if content array has text blocks +/// - Assistant messages: include if content array has text blocks (filter out tool_use, etc.) +fn extract_filtered_message(record: &serde_json::Value) -> Option { + let message = record.get("message")?; + let role = message.get("role")?.as_str()?; + let content = message.get("content")?; + + match role { + "user" => { + // If content is a string, include directly + if content.is_string() { + return Some(message.clone()); + } + + // If content is an array, filter to text blocks only + if let Some(content_array) = content.as_array() { + let text_blocks: Vec = content_array + .iter() + .filter(|block| block.get("type").and_then(|t| t.as_str()) == Some("text")) + .cloned() + .collect(); + + // Skip if no text content (e.g., only tool_result) + if text_blocks.is_empty() { + return None; + } + + // Create filtered message with only text content + let mut filtered_message = serde_json::Map::new(); + filtered_message.insert( + "role".to_string(), + serde_json::Value::String("user".to_string()), + ); + filtered_message + .insert("content".to_string(), serde_json::Value::Array(text_blocks)); + + return Some(serde_json::Value::Object(filtered_message)); + } + + None + } + "assistant" => { + // Filter assistant messages to only include text content + if let Some(content_array) = content.as_array() { + // Filter to only text blocks + let text_blocks: Vec = content_array + .iter() + .filter(|block| block.get("type").and_then(|t| t.as_str()) == Some("text")) + .cloned() + .collect(); + + // Skip if no text content + if text_blocks.is_empty() { + return None; + } + + // Create filtered message with only text content + let mut filtered_message = serde_json::Map::new(); + filtered_message.insert( + "role".to_string(), + serde_json::Value::String("assistant".to_string()), + ); + filtered_message + .insert("content".to_string(), serde_json::Value::Array(text_blocks)); + + 
Some(serde_json::Value::Object(filtered_message)) + } else { + // Content is not an array (unusual), skip + None + } + } + _ => None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_extract_project_name() { + assert_eq!( + extract_project_name( + "-private-var-folders-m1-9q-ct1913z10v6wbnv54j25r0000gn-T-vibe-kanban-worktrees-a04a-store-payloads-i" + ), + "store-payloads-i" + ); + + assert_eq!( + extract_project_name( + "-private-var-folders-m1-9q-ct1913z10v6wbnv54j25r0000gn-T-vibe-kanban-worktrees-1ff1-new-rust-binary" + ), + "new-rust-binary" + ); + } + + #[test] + fn test_branches_match() { + // Exact match + assert!(branches_match("feature-auth", "feature-auth")); + + // With prefix + assert!(branches_match("feature-auth", "vk/feature-auth")); + assert!(branches_match("vk/feature-auth", "feature-auth")); + + // Slug matching + assert!(branches_match( + "a04a-store-payloads-i", + "vk/a04a-store-payloads-i" + )); + + // Case insensitive + assert!(branches_match("Feature-Auth", "feature-auth")); + + // Non-matches + assert!(!branches_match("feature-auth", "feature-other")); + assert!(!branches_match("main", "feature-auth")); + + // Regression tests: substring matches should NOT match + // (these were incorrectly matching before the fix) + assert!(!branches_match("vk/d13f-remove-compare-c", "c")); + assert!(!branches_match("vk/d13f-remove-compare-c", "compare")); + assert!(!branches_match("feature-auth", "auth")); + assert!(!branches_match("feature-auth", "feature")); + } + + #[test] + fn test_normalize_branch() { + assert_eq!(normalize_branch("refs/heads/main"), "main"); + assert_eq!(normalize_branch("Feature-Auth"), "feature-auth"); + assert_eq!(normalize_branch("vk/feature-auth"), "vk/feature-auth"); + } + + #[test] + fn test_extract_branch_slug() { + assert_eq!(extract_branch_slug("vk/feature-auth"), "feature-auth"); + assert_eq!(extract_branch_slug("feature-auth"), "feature-auth"); + assert_eq!( + 
extract_branch_slug("user/prefix/feature-auth"), + "feature-auth" + ); + } +} diff --git a/crates/review/src/config.rs b/crates/review/src/config.rs new file mode 100644 index 00000000..2908f737 --- /dev/null +++ b/crates/review/src/config.rs @@ -0,0 +1,47 @@ +use std::path::PathBuf; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Default, Serialize, Deserialize)] +pub struct Config { + #[serde(default)] + pub email: Option, +} + +impl Config { + /// Get the path to the config file (~/.config/vibe-kanban/review.toml) + fn config_path() -> Option { + dirs::config_dir().map(|p| p.join("vibe-kanban").join("review.toml")) + } + + /// Load config from disk, returning default if file doesn't exist + pub fn load() -> Self { + let Some(path) = Self::config_path() else { + return Self::default(); + }; + + if !path.exists() { + return Self::default(); + } + + match std::fs::read_to_string(&path) { + Ok(contents) => toml::from_str(&contents).unwrap_or_default(), + Err(_) => Self::default(), + } + } + + /// Save config to disk + pub fn save(&self) -> std::io::Result<()> { + let Some(path) = Self::config_path() else { + return Ok(()); + }; + + // Create parent directories if needed + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + + let contents = toml::to_string_pretty(self).unwrap_or_default(); + std::fs::write(&path, contents) + } +} diff --git a/crates/review/src/error.rs b/crates/review/src/error.rs new file mode 100644 index 00000000..7388fab6 --- /dev/null +++ b/crates/review/src/error.rs @@ -0,0 +1,43 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum ReviewError { + #[error("GitHub CLI (gh) is not installed. Install it from https://cli.github.com/")] + GhNotInstalled, + + #[error("GitHub CLI is not authenticated. Run 'gh auth login' first.")] + GhNotAuthenticated, + + #[error("Invalid GitHub PR URL format. 
Expected: https://github.com/owner/repo/pull/123")] + InvalidPrUrl, + + #[error("Failed to get PR information: {0}")] + PrInfoFailed(String), + + #[error("Failed to clone repository: {0}")] + CloneFailed(String), + + #[error("Failed to checkout PR: {0}")] + CheckoutFailed(String), + + #[error("Failed to create archive: {0}")] + ArchiveFailed(String), + + #[error("API request failed: {0}")] + ApiError(String), + + #[error("Upload failed: {0}")] + UploadFailed(String), + + #[error("Review failed: {0}")] + ReviewFailed(String), + + #[error("Review timed out after 10 minutes")] + Timeout, + + #[error("Failed to discover Claude Code sessions: {0}")] + SessionDiscoveryFailed(String), + + #[error("Failed to parse JSONL file: {0}")] + JsonlParseFailed(String), +} diff --git a/crates/review/src/github.rs b/crates/review/src/github.rs new file mode 100644 index 00000000..86e6d66b --- /dev/null +++ b/crates/review/src/github.rs @@ -0,0 +1,229 @@ +use std::{path::Path, process::Command}; + +use serde::Deserialize; +use tracing::debug; + +use crate::error::ReviewError; + +/// Information about a pull request +#[derive(Debug)] +pub struct PrInfo { + pub owner: String, + pub repo: String, + pub title: String, + pub description: String, + pub base_commit: String, + pub head_commit: String, + pub head_ref_name: String, +} + +/// Response from `gh pr view --json` +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct GhPrView { + title: String, + body: String, + base_ref_oid: String, + head_ref_oid: String, + head_ref_name: String, +} + +/// Parse a GitHub PR URL to extract owner, repo, and PR number +/// +/// Expected format: https://github.com/owner/repo/pull/123 +pub fn parse_pr_url(url: &str) -> Result<(String, String, i64), ReviewError> { + let url = url.trim(); + + // Remove trailing slashes + let url = url.trim_end_matches('/'); + + // Try to parse as URL + let parts: Vec<&str> = url.split('/').collect(); + + // Find the index of "github.com" and then 
extract owner/repo/pull/number + let github_idx = parts + .iter() + .position(|&p| p == "github.com") + .ok_or(ReviewError::InvalidPrUrl)?; + + // We need at least: github.com / owner / repo / pull / number + if parts.len() < github_idx + 5 { + return Err(ReviewError::InvalidPrUrl); + } + + let owner = parts[github_idx + 1].to_string(); + let repo = parts[github_idx + 2].to_string(); + + if parts[github_idx + 3] != "pull" { + return Err(ReviewError::InvalidPrUrl); + } + + let pr_number: i64 = parts[github_idx + 4] + .parse() + .map_err(|_| ReviewError::InvalidPrUrl)?; + + if owner.is_empty() || repo.is_empty() || pr_number <= 0 { + return Err(ReviewError::InvalidPrUrl); + } + + Ok((owner, repo, pr_number)) +} + +/// Check if the GitHub CLI is installed +fn ensure_gh_available() -> Result<(), ReviewError> { + let output = Command::new("which") + .arg("gh") + .output() + .map_err(|_| ReviewError::GhNotInstalled)?; + + if !output.status.success() { + return Err(ReviewError::GhNotInstalled); + } + + Ok(()) +} + +/// Get PR information using `gh pr view` +pub fn get_pr_info(owner: &str, repo: &str, pr_number: i64) -> Result { + ensure_gh_available()?; + + debug!("Fetching PR info for {owner}/{repo}#{pr_number}"); + + let output = Command::new("gh") + .args([ + "pr", + "view", + &pr_number.to_string(), + "--repo", + &format!("{owner}/{repo}"), + "--json", + "title,body,baseRefOid,headRefOid,headRefName", + ]) + .output() + .map_err(|e| ReviewError::PrInfoFailed(e.to_string()))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + let lower = stderr.to_ascii_lowercase(); + + if lower.contains("authentication") + || lower.contains("gh auth login") + || lower.contains("unauthorized") + { + return Err(ReviewError::GhNotAuthenticated); + } + + return Err(ReviewError::PrInfoFailed(stderr.to_string())); + } + + let stdout = String::from_utf8_lossy(&output.stdout); + let pr_view: GhPrView = + serde_json::from_str(&stdout).map_err(|e| 
ReviewError::PrInfoFailed(e.to_string()))?; + + Ok(PrInfo { + owner: owner.to_string(), + repo: repo.to_string(), + title: pr_view.title, + description: pr_view.body, + base_commit: pr_view.base_ref_oid, + head_commit: pr_view.head_ref_oid, + head_ref_name: pr_view.head_ref_name, + }) +} + +/// Clone a repository using `gh repo clone` +pub fn clone_repo(owner: &str, repo: &str, target_dir: &Path) -> Result<(), ReviewError> { + ensure_gh_available()?; + + debug!("Cloning {owner}/{repo} to {}", target_dir.display()); + + let output = Command::new("gh") + .args([ + "repo", + "clone", + &format!("{owner}/{repo}"), + target_dir + .to_str() + .ok_or_else(|| ReviewError::CloneFailed("Invalid target path".to_string()))?, + ]) + .output() + .map_err(|e| ReviewError::CloneFailed(e.to_string()))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(ReviewError::CloneFailed(stderr.to_string())); + } + + Ok(()) +} + +/// Checkout a specific commit by SHA +/// +/// This is more reliable than `gh pr checkout` because it works even when +/// the PR's branch has been deleted (common for merged PRs). 
+pub fn checkout_commit(commit_sha: &str, repo_dir: &Path) -> Result<(), ReviewError> { + debug!("Fetching commit {commit_sha} in {}", repo_dir.display()); + + // First, fetch the specific commit + let output = Command::new("git") + .args(["fetch", "origin", commit_sha]) + .current_dir(repo_dir) + .output() + .map_err(|e| ReviewError::CheckoutFailed(e.to_string()))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(ReviewError::CheckoutFailed(format!( + "Failed to fetch commit: {stderr}" + ))); + } + + debug!("Checking out commit {commit_sha}"); + + // Then checkout the commit + let output = Command::new("git") + .args(["checkout", commit_sha]) + .current_dir(repo_dir) + .output() + .map_err(|e| ReviewError::CheckoutFailed(e.to_string()))?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(ReviewError::CheckoutFailed(format!( + "Failed to checkout commit: {stderr}" + ))); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_pr_url_valid() { + let (owner, repo, pr) = parse_pr_url("https://github.com/anthropics/claude-code/pull/123") + .expect("Should parse valid URL"); + assert_eq!(owner, "anthropics"); + assert_eq!(repo, "claude-code"); + assert_eq!(pr, 123); + } + + #[test] + fn test_parse_pr_url_with_trailing_slash() { + let (owner, repo, pr) = + parse_pr_url("https://github.com/owner/repo/pull/456/").expect("Should parse"); + assert_eq!(owner, "owner"); + assert_eq!(repo, "repo"); + assert_eq!(pr, 456); + } + + #[test] + fn test_parse_pr_url_invalid_format() { + assert!(parse_pr_url("https://github.com/owner/repo").is_err()); + assert!(parse_pr_url("https://github.com/owner/repo/issues/123").is_err()); + assert!(parse_pr_url("https://gitlab.com/owner/repo/pull/123").is_err()); + assert!(parse_pr_url("not a url").is_err()); + } +} diff --git a/crates/review/src/main.rs b/crates/review/src/main.rs new file mode 100644 
index 00000000..f00680b0 --- /dev/null +++ b/crates/review/src/main.rs @@ -0,0 +1,255 @@ +mod api; +mod archive; +mod claude_session; +mod config; +mod error; +mod github; +mod session_selector; + +use std::time::Duration; + +use anyhow::Result; +use api::{ReviewApiClient, ReviewStatus, StartRequest}; +use clap::Parser; +use error::ReviewError; +use github::{checkout_commit, clone_repo, get_pr_info, parse_pr_url}; +use indicatif::{ProgressBar, ProgressStyle}; +use tempfile::TempDir; +use tracing::debug; +use tracing_subscriber::EnvFilter; + +const DEFAULT_API_URL: &str = "https://api.dev.vibekanban.com"; +const POLL_INTERVAL: Duration = Duration::from_secs(10); +const TIMEOUT: Duration = Duration::from_secs(600); // 10 minutes + +const BANNER: &str = r#" +██████╗ ███████╗██╗ ██╗██╗███████╗██╗ ██╗ ███████╗ █████╗ ███████╗████████╗ +██╔══██╗██╔════╝██║ ██║██║██╔════╝██║ ██║ ██╔════╝██╔══██╗██╔════╝╚══██╔══╝ +██████╔╝█████╗ ██║ ██║██║█████╗ ██║ █╗ ██║ █████╗ ███████║███████╗ ██║ +██╔══██╗██╔══╝ ╚██╗ ██╔╝██║██╔══╝ ██║███╗██║ ██╔══╝ ██╔══██║╚════██║ ██║ +██║ ██║███████╗ ╚████╔╝ ██║███████╗╚███╔███╔╝██╗██║ ██║ ██║███████║ ██║ +╚═╝ ╚═╝╚══════╝ ╚═══╝ ╚═╝╚══════╝ ╚══╝╚══╝ ╚═╝╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═╝ + +"#; + +#[derive(Parser, Debug)] +#[command(name = "review")] +#[command( + about = "Vibe-Kanban Review helps you review GitHub pull requests by turning them into a clear, story-driven summary instead of a wall of diffs. You provide a pull request URL, optionally link a Claude Code project for additional context, and it builds a narrative that highlights key events and important decisions, helping you prioritise what actually needs attention. It's particularly useful when reviewing large amounts of AI-generated code. Note that code is uploaded to and processed on Vibe-Kanban servers using AI." 
+)] +#[command(version)] +struct Args { + /// GitHub PR URL (e.g., https://github.com/owner/repo/pull/123) + pr_url: String, + + /// Enable verbose output + #[arg(short, long, default_value_t = false)] + verbose: bool, + + /// API base URL + #[arg(long, env = "REVIEW_API_URL", default_value = DEFAULT_API_URL)] + api_url: String, +} + +fn show_disclaimer() { + println!(); + println!( + "DISCLAIMER: Your code will be processed on our secure remote servers, all artefacts (code, AI logs, etc...) will be deleted after 14 days." + ); + println!(); + println!("Full terms and conditions and privacy policy: https://review.fast/terms"); + println!(); + println!("Press Enter to accept and continue..."); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).ok(); +} + +fn prompt_email(config: &mut config::Config) -> String { + use dialoguer::Input; + + let mut input: Input = + Input::new().with_prompt("Email address (we'll send a link to the review here, no spam)"); + + if let Some(ref saved_email) = config.email { + input = input.default(saved_email.clone()); + } + + let email: String = input.interact_text().expect("Failed to read email"); + + // Save email for next time + config.email = Some(email.clone()); + if let Err(e) = config.save() { + debug!("Failed to save config: {}", e); + } + + email +} + +fn create_spinner(message: &str) -> ProgressBar { + let spinner = ProgressBar::new_spinner(); + spinner.set_style( + ProgressStyle::default_spinner() + .template("{spinner:.green} {msg}") + .expect("Invalid spinner template"), + ); + spinner.set_message(message.to_string()); + spinner.enable_steady_tick(Duration::from_millis(100)); + spinner +} + +#[tokio::main] +async fn main() -> Result<()> { + let args = Args::parse(); + + // Initialize tracing + let filter = if args.verbose { + EnvFilter::new("debug") + } else { + EnvFilter::new("warn") + }; + tracing_subscriber::fmt().with_env_filter(filter).init(); + + println!("{}", BANNER); + + show_disclaimer(); + 
+ debug!("Args: {:?}", args); + + // Run the main flow and handle errors + if let Err(e) = run(args).await { + eprintln!("Error: {e}"); + std::process::exit(1); + } + + Ok(()) +} + +async fn run(args: Args) -> Result<(), ReviewError> { + // 1. Load config and prompt for email + let mut config = config::Config::load(); + let email = prompt_email(&mut config); + + // 2. Parse PR URL + let spinner = create_spinner("Parsing PR URL..."); + let (owner, repo, pr_number) = parse_pr_url(&args.pr_url)?; + spinner.finish_with_message(format!("PR: {owner}/{repo}#{pr_number}")); + + // 3. Get PR info + let spinner = create_spinner("Fetching PR information..."); + let pr_info = get_pr_info(&owner, &repo, pr_number)?; + spinner.finish_with_message(format!("PR: {}", pr_info.title)); + + // 4. Select Claude Code session (optional) + let session_files = match session_selector::select_session(&pr_info.head_ref_name) { + Ok(session_selector::SessionSelection::Selected(files)) => { + println!(" Selected {} session file(s)", files.len()); + Some(files) + } + Ok(session_selector::SessionSelection::Skipped) => { + println!(" Skipping project attachment"); + None + } + Err(e) => { + debug!("Session selection error: {}", e); + println!(" No sessions found"); + None + } + }; + + // 5. Clone repository to temp directory + let temp_dir = TempDir::new().map_err(|e| ReviewError::CloneFailed(e.to_string()))?; + let repo_dir = temp_dir.path().join(&repo); + + let spinner = create_spinner("Cloning repository..."); + clone_repo(&owner, &repo, &repo_dir)?; + spinner.finish_with_message("Repository cloned"); + + // 6. Checkout PR head commit + let spinner = create_spinner("Checking out PR..."); + checkout_commit(&pr_info.head_commit, &repo_dir)?; + spinner.finish_with_message("PR checked out"); + + // 7. 
Create tarball (with optional session data) + let spinner = create_spinner("Creating archive..."); + + // If sessions were selected, write .agent-messages.json to repo root + if let Some(ref files) = session_files { + let json_content = claude_session::concatenate_sessions_to_json(files)?; + let agent_messages_path = repo_dir.join(".agent-messages.json"); + std::fs::write(&agent_messages_path, json_content) + .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?; + } + + let payload = archive::create_tarball(&repo_dir)?; + let size_mb = payload.len() as f64 / 1_048_576.0; + spinner.finish_with_message(format!("Archive created ({size_mb:.2} MB)")); + + // 8. Initialize review + let client = ReviewApiClient::new(args.api_url.clone()); + let spinner = create_spinner("Initializing review..."); + let init_response = client.init(&args.pr_url, &email, &pr_info.title).await?; + spinner.finish_with_message(format!("Review ID: {}", init_response.review_id)); + + // 9. Upload archive + let spinner = create_spinner("Uploading archive..."); + client.upload(&init_response.upload_url, payload).await?; + spinner.finish_with_message("Upload complete"); + + // 10. Start review + let spinner = create_spinner("Starting review..."); + let codebase_url = format!("r2://{}", init_response.object_key); + client + .start(StartRequest { + id: init_response.review_id.to_string(), + title: pr_info.title, + description: pr_info.description, + org: pr_info.owner, + repo: pr_info.repo, + codebase_url, + base_commit: pr_info.base_commit, + }) + .await?; + spinner.finish_with_message(format!("Review started, we'll send you an email at {} when the review is ready. This can take a few minutes, you may now close the terminal", email)); + + // 11. 
Poll for completion + let spinner = create_spinner("Review in progress..."); + let start_time = std::time::Instant::now(); + + loop { + tokio::time::sleep(POLL_INTERVAL).await; + + // Check for timeout + if start_time.elapsed() > TIMEOUT { + spinner.finish_with_message("Timed out"); + return Err(ReviewError::Timeout); + } + + let status = client + .poll_status(&init_response.review_id.to_string()) + .await?; + + match status.status { + ReviewStatus::Completed => { + spinner.finish_with_message("Review completed!"); + break; + } + ReviewStatus::Failed => { + spinner.finish_with_message("Review failed"); + let error_msg = status.error.unwrap_or_else(|| "Unknown error".to_string()); + return Err(ReviewError::ReviewFailed(error_msg)); + } + _ => { + let progress = status.progress.unwrap_or_else(|| status.status.to_string()); + spinner.set_message(format!("Review in progress: {progress}")); + } + } + } + + // 12. Print result URL + let review_url = client.review_url(&init_response.review_id.to_string()); + println!("\nReview available at:"); + println!(" {review_url}"); + + Ok(()) +} diff --git a/crates/review/src/session_selector.rs b/crates/review/src/session_selector.rs new file mode 100644 index 00000000..6a9687b9 --- /dev/null +++ b/crates/review/src/session_selector.rs @@ -0,0 +1,173 @@ +use std::{path::PathBuf, time::SystemTime}; + +use dialoguer::{Select, theme::ColorfulTheme}; +use tracing::debug; + +use crate::{ + claude_session::{ + ClaudeProject, discover_projects, discover_sessions, find_projects_by_branch, + }, + error::ReviewError, +}; + +/// Result of session selection process +pub enum SessionSelection { + /// User selected session files to include (all sessions from a project) + Selected(Vec), + /// User chose to skip session attachment + Skipped, +} + +/// Prompt user to select a Claude Code project +/// +/// Flow: +/// 1. Try auto-match by branch name +/// 2. If match found, confirm with user +/// 3. 
If no match or user declines, show scrollable project list +/// 4. Allow user to skip entirely +/// +/// When a project is selected, ALL sessions from that project are included. +pub fn select_session(pr_branch: &str) -> Result { + debug!( + "Looking for Claude Code projects matching branch: {}", + pr_branch + ); + + let projects = discover_projects()?; + + if projects.is_empty() { + debug!("No Claude Code projects found"); + return Ok(SessionSelection::Skipped); + } + + // Try auto-match by branch + let matches = find_projects_by_branch(&projects, pr_branch)?; + + if !matches.is_empty() { + // Found a matching project, ask for confirmation + let (project, sessions) = &matches[0]; + + println!(); + println!(); + println!( + "Found matching Claude Code project for branch '{}'", + pr_branch + ); + println!(" Project: {}", project.name); + if let Some(ref prompt) = project.first_prompt { + println!(" \"{}\"", prompt); + } + println!( + " {} session{} · Last modified: {}", + project.session_count, + if project.session_count == 1 { "" } else { "s" }, + format_time_ago(project.modified_at) + ); + println!(); + + let selection = Select::with_theme(&ColorfulTheme::default()) + .with_prompt("Use this project to improve review quality?") + .items(&[ + "Yes, use this project", + "No, choose a different project", + "Skip (generate review from just code changes)", + ]) + .default(0) + .interact() + .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + + match selection { + 0 => { + // Yes, use all sessions from this project + let paths: Vec = sessions.iter().map(|s| s.path.clone()).collect(); + return Ok(SessionSelection::Selected(paths)); + } + 2 => { + // Skip + return Ok(SessionSelection::Skipped); + } + _ => { + // Fall through to manual selection + } + } + } + + // Manual selection: select a project + select_project(&projects) +} + +/// Manual project selection - returns all sessions from selected project +fn select_project(projects: &[ClaudeProject]) -> 
Result { + // Build project list with rich metadata + let mut items: Vec = Vec::new(); + items.push("Skip (no project)\n".to_string()); + items.extend(projects.iter().map(format_project_item)); + items.push("Skip (no project)\n".to_string()); + + println!(); + println!(); + let selection = Select::with_theme(&ColorfulTheme::default()) + .with_prompt("Select a Claude Code project to improve review quality") + .items(&items) + .default(0) + .max_length(5) + .interact() + .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?; + + // Skip option + if selection == 0 || selection == items.len() - 1 { + return Ok(SessionSelection::Skipped); + } + + let project = &projects[selection]; + let sessions = discover_sessions(project)?; + + // Return all session paths from this project + let paths: Vec = sessions.iter().map(|s| s.path.clone()).collect(); + Ok(SessionSelection::Selected(paths)) +} + +/// Format a project item for display in the selection list +fn format_project_item(project: &ClaudeProject) -> String { + let prompt_line = project + .first_prompt + .as_ref() + .map(|p| format!("\n \"{}\"", p)) + .unwrap_or_default(); + + let branch = project + .git_branch + .as_ref() + .map(|b| format!("branch: {}", b)) + .unwrap_or_else(|| "no branch".to_string()); + + format!( + "{}{}\n {} · {} session{} · {}\n", + project.name, + prompt_line, + branch, + project.session_count, + if project.session_count == 1 { "" } else { "s" }, + format_time_ago(project.modified_at) + ) +} + +/// Format a SystemTime as a human-readable "time ago" string +fn format_time_ago(time: SystemTime) -> String { + let now = SystemTime::now(); + let duration = now.duration_since(time).unwrap_or_default(); + let secs = duration.as_secs(); + + if secs < 60 { + "just now".to_string() + } else if secs < 3600 { + let mins = secs / 60; + format!("{} minute{} ago", mins, if mins == 1 { "" } else { "s" }) + } else if secs < 86400 { + let hours = secs / 3600; + format!("{} hour{} ago", hours, if hours 
== 1 { "" } else { "s" }) + } else { + let days = secs / 86400; + format!("{} day{} ago", days, if days == 1 { "" } else { "s" }) + } +} diff --git a/local-build.sh b/local-build.sh index 13bc414f..c17046fb 100755 --- a/local-build.sh +++ b/local-build.sh @@ -27,7 +27,14 @@ zip -q vibe-kanban-mcp.zip vibe-kanban-mcp rm -f vibe-kanban-mcp mv vibe-kanban-mcp.zip npx-cli/dist/macos-arm64/vibe-kanban-mcp.zip +# Copy the Review CLI binary +cp target/release/review vibe-kanban-review +zip -q vibe-kanban-review.zip vibe-kanban-review +rm -f vibe-kanban-review +mv vibe-kanban-review.zip npx-cli/dist/macos-arm64/vibe-kanban-review.zip + echo "✅ NPM package ready!" echo "📁 Files created:" echo " - npx-cli/dist/macos-arm64/vibe-kanban.zip" echo " - npx-cli/dist/macos-arm64/vibe-kanban-mcp.zip" +echo " - npx-cli/dist/macos-arm64/vibe-kanban-review.zip" diff --git a/npx-cli/bin/cli.js b/npx-cli/bin/cli.js index 5f16d438..bd03c082 100755 --- a/npx-cli/bin/cli.js +++ b/npx-cli/bin/cli.js @@ -69,7 +69,9 @@ function getBinaryName(base) { const platformDir = getPlatformDir(); const extractDir = path.join(__dirname, "..", "dist", platformDir); +const args = process.argv.slice(2); const isMcpMode = process.argv.includes("--mcp"); +const isReviewMode = args[0] === "review"; // ensure output dir fs.mkdirSync(extractDir, { recursive: true }); @@ -142,6 +144,17 @@ if (isMcpMode) { }); process.on("SIGTERM", () => proc.kill("SIGTERM")); }); +} else if (isReviewMode) { + extractAndRun("vibe-kanban-review", (bin) => { + // Pass all args except 'review' to the binary + const reviewArgs = args.slice(1); + const proc = spawn(bin, reviewArgs, { stdio: "inherit" }); + proc.on("exit", (c) => process.exit(c || 0)); + proc.on("error", (e) => { + console.error("❌ Review CLI error:", e.message); + process.exit(1); + }); + }); } else { console.log(`📦 Extracting vibe-kanban...`); extractAndRun("vibe-kanban", (bin) => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b35200dc..93914486 100644 --- 
a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -270,6 +270,24 @@ importers: remote-frontend: dependencies: + '@git-diff-view/file': + specifier: ^0.0.30 + version: 0.0.30 + '@git-diff-view/react': + specifier: ^0.0.30 + version: 0.0.30(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + clsx: + specifier: ^2.1.1 + version: 2.1.1 + highlight.js: + specifier: ^11.11.1 + version: 11.11.1 + posthog-js: + specifier: ^1.283.0 + version: 1.283.0 + prettier: + specifier: ^3.6.1 + version: 3.6.1 react: specifier: ^18.2.0 version: 18.3.1 @@ -279,6 +297,9 @@ importers: react-router-dom: specifier: ^7.9.5 version: 7.9.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + tailwind-merge: + specifier: ^2.6.0 + version: 2.6.0 devDependencies: '@types/react': specifier: ^18.2.43 diff --git a/remote-frontend/.env.example b/remote-frontend/.env.production.example similarity index 64% rename from remote-frontend/.env.example rename to remote-frontend/.env.production.example index 8d599aa9..7cbc185f 100644 --- a/remote-frontend/.env.example +++ b/remote-frontend/.env.production.example @@ -3,3 +3,7 @@ VITE_API_BASE_URL=http://localhost:3000 # App base URL (frontend) VITE_APP_BASE_URL=http://localhost:3000 + +# PostHog analytics +VITE_PUBLIC_POSTHOG_KEY= +VITE_PUBLIC_POSTHOG_HOST= diff --git a/remote-frontend/index.html b/remote-frontend/index.html index 955d4fea..51af522d 100644 --- a/remote-frontend/index.html +++ b/remote-frontend/index.html @@ -3,6 +3,8 @@ + + Vibe Kanban Remote diff --git a/remote-frontend/package.json b/remote-frontend/package.json index 29ce2314..e94217fb 100644 --- a/remote-frontend/package.json +++ b/remote-frontend/package.json @@ -6,12 +6,20 @@ "scripts": { "dev": "vite", "build": "tsc && vite build", - "preview": "vite preview" + "preview": "vite preview", + "format": "prettier --write \"src/**/*.{ts,tsx,js,jsx,json,css,md}\"" }, "dependencies": { + "@git-diff-view/file": "^0.0.30", + "@git-diff-view/react": "^0.0.30", + "clsx": "^2.1.1", + "highlight.js": "^11.11.1", + 
"posthog-js": "^1.283.0", + "prettier": "^3.6.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router-dom": "^7.9.5" + "react-router-dom": "^7.9.5", + "tailwind-merge": "^2.6.0" }, "devDependencies": { "@types/react": "^18.2.43", @@ -23,4 +31,4 @@ "typescript": "^5.9.2", "vite": "^5.0.8" } -} +} \ No newline at end of file diff --git a/remote-frontend/public/favicon.png b/remote-frontend/public/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..53827133a9529b20fb9ccde51430b53d207e6bf5 GIT binary patch literal 13074 zcmZWvcRbbK|3CLy*UBY(1wM|k};D304UYfl=R@wwts(6#PDyu2`3Wx1L2{k zssPIRSeF670CgpKeIN6+EK+~{;l0CY@nhRpUx-OLq>VAUWGWprC`}rAev?ndr|ctp>;K9`sEZ_f ziq!A?-6Vb0v^RC!Y+t{asY!DfdLK_!3q~WS*;=Js5J&b~3B**UfwuW{awn<|LUKF6phMe{x<3q6S3Ut6Zy6+L9(eZCagn>YWG6zP)z){iv! zN%STS01gLqQh$|(x;PX66Y||%lCk+5GiqvKkuz<750gua8)auk#aJc)kyjK11C@v+ zPk=k4Vvn#A0@sSL!NI{t@}a~C5JXN%`H2R9iuPdxn|uJFh7-2=yvii)wQ8589mx&8 zpXa)x0UjA17G`<*XC%PVv?1EoV`5@@nPcC&`UNMLFa07yN-RxRu)IHgJw5w-hmQqY zh>T?twUhV>JUu-FSbziVueTmP!JAUp$j<>gU(5@Tq_H!*ySrwuRpX~ZMi8v0;W0C9 zZCGbLz3&fS-QHKx)g3VWNj~Jl+;joh65v>90i&qh*Hl@4grzvT?`%^VJF~G-uBE1S zs4YN$F2l)*Bp6uV>c9TPx0Fl_ZbcRo6VrX(=VP#>9z8sNG#;@!Cs@~=q0%dTWleyr+aQh4BBDDR4NX z*L<<9YN)8FxGOI&e{X=1R=NFnt0&5!&+m z7{CuQf{ki~Zcwxmr>4LE$!J1CLeG`tqTsg=L!OF_YdVgQv8IoY8*+rLY)@tE=StlqxEj_)Vq_T1g)l=m!i}R2pV3f-0 z6Nh^nSfG=WZKSU+#>371JER{8Vu6b;Dm?1hv(Z~Vn={tWl&tNIQ=fc2$<*iLPSN1z zp^yHMn3%{MVa2)l_0`ThgOc8tx1MgN!JR(WR9A1OnbEFmG<+HyMuXwtn7yr_(9x2s z7)j5Y((Sj6P~xP{)#Z&5&sfv*_iu9SO}{aC;nJTr_ucs}R0&nCxf16$uN(l3Pp~4}K#0PI5{0VcO#bpDE&|bv9#gRv&$ySHn;X&A z*2Y#@S^3AlH~sg)$xeU&x8*1E1-D;qa~EQbkefSsh8m#jh{~0)=l5eutwGP3uYd&{ zjX8c0sdbrTc9Uj)`7Ac}T9U?_Xa)fz{N($GgYU<0&IM_xt6yc8aG5y&_4w%q38{}3%qyl-YbgELWlIJf0ec;cRbpk zj`**;dyIvJ<(QPX%@5(WTgeTZY$Uu5^X={Jav^>fxo!cs`OmSq&PXab1+F*UUw+Sb#ZSX$_g5sM@eRRKpPZcR!h@ea{`TIHA}_EpTZ_Chw>1`zz(fVW?F>9mRK2uy+{u2n8Dn0alaQQTEdlG{;wsU$ zFG&CX{ktst&BwnCUfuTE+{d_TKR;i39e-re=(Tp;@q-kPtZa)KXQIH<V 
z1USw3Wn=<8?%ur{d{@OtGXF6#5s?hwOE3~JKUtJJH*RzsE?z!9*a+gddbP;ud%k+c zu%qXOcc_9P2MbURD;{_ zMH>7nKR7ruw5}w3NInsT13;ZC=~K|rV9xvVzx_czqzRdsrc&;6AFs(0;Hnw^OS~AP z$GL&PVWK`^5JJHDGwLZfxWEGRIKAU9$@o@ByE(Kt>A1OV#{B&Gb6(@k?Y+h=xrE5l7> zFOrh88;xtaR~!wGO!^Q5-#3W=RR-xoA5GSArp?l+(-WS z?0BP7Lr5f~lCX^iuZ3(fL#}#}03#BgytY zLLrr8Lq9eTzIy_ydl(9w5Dz24z#QgZ2${8H0~2^ymPSOJ>PZ$T*lLV7qKG=o z@l%ULkH?9P8lF6?W+=P4J}vOc zM9))`7$JAR8)3|>t><2egEr6LF*^hS4&mVU;&781u9>h+v`%VQra2bT1`an*ruIv$ zfIl^k{`tH$shg|44e)0KQ>aT)PnM3UWOfteR?(sRBvH?wAKAhU^+{9P=qLUB2Y^Di za!XYeqY`sWB%vD-u(%|{$LGjTjLSKyAY8Jc0R(Q|kv70r6oRl-S6Hi&S^S+=qysg3 zevDoE_pU9Ajpt)Bk=$ZJXdEw>s}#Q|~K0b=Qq^hZlDLG#P` z(l1!01T#yI)S}$?kyfNQZoqzLV8MSgh{TemE%fusr$}qkf&0za1nM8(HQ?29xx5Uk zKLu9*{!Ka*!3^wzYgh`93bI5JeE%r_tCT4zFX`X)elq`hjYagQxsjtxvY{~zp)U7m zR7jL^C03HQ!@C*F2pDDu`hX-!=d2Ol4g+_xlrMn?t_H`A%`qW?{m}UsXaWJm#3g|i z9}F>EX7v2nn)knY_<#R&$OjTG-bO2Bmt|v;f=gh61MapSTI9)`=W7VMR-70t9SU+d`o9NLmOq4^ghf5w?N%zyIu< zZ7;!5e8>M+mVaQ}{NXtXF>wtUP`*RnMvUj>W7ou)Uqg(IVQfrHO(&>d-L9b>hxm{f z9>u`^(1v8kIP5|2G+d|__F1D4n|D|&5vx?#JG9~<_q+0H350G43dVVd3Ys?=jXZGC z9cYw4UoVO;5C@$?O_W$PVGY7p7(nXN_&J8D`b}k}`+0W?e{YE4tfFT~Xea`A`EkAf z!!&?R`jy<#PJ)I#s}o0|XPcQ98QS`KV?fxAGWEfRh@DKZ`XPl|w#J+9Z}amr^>}Gr zi3th)xhFVDM}e4@>P5oJ>8Bh5($apePEG<->@(UVn>08sbw=H_>|gkx0j{g79%9V*Dr;i`&-13p$jG|g1;RXKfg6D;!Hrf{<|v_;_X&W!W$pN1 zkMP*R>eh#eJCdad|m=dTQ$aJ$?P5G%G8sMbZ$eW;TnAm&NziUn%u8=rhM1zr(tZN;1JQeM7kI#z; zmhY_+m>e47f|%Bt@iQM}EJE%^r3RIstP`b(lhpZAOz>OI;w)5TnfBrtZ@e0_5I(p5 z;)&g#NCp*~3c8}m$s32@(yJjM{19g!>gPT@JZ$y*_wRWm0l~G|+1WRb>znjSh9fLm z17*J0iV#7hZhC=|@)~5{jimkBkg6m&jxa@xRzddkC;OI#H(R6khWVGiENPN^hKBvW zBn5xwCUxvIhT-NVFFE;09Fk!1_MRtwDXFPf%$t1N-}TzvtvQe(JndBHqN&#ByHPEm z+3;FKG3e$rgBjJ8{KU(u3$d&>A7>Z|OhG!w&n3yKjuS+{?J;u}&j{)f`bx-%fsYkB z+EiKjt@Z8OE7ZhEIXOAvf`aR}da4$Ri;JT&BwRmN3-mz5_#!+!-0U^)^9PI(VaDAK zV%=rt&FQ)p7VI5W?|x=|md``0U(6*_PU12h*GS4|x}+m{v0cHgWlzt{#D-U1XFch| z;C4j{;3a`HSCWyD(dck@Q0G2TJcA(3vnjnIeLRw`+qP;%`W`ZdQCJ~gmzQ`ztW-sU zDDf(Nw7g;FU@69Y-Mr4t%GSnaejWn$=jdlj=W+4z)9|A#@xhLW 
zJgGJZF}swsG<(B)_b5FK#mJOPM8*0XQwH|Y&pqt7Vw@Po4yhfu)U&OP_q_?_Gq){S zpN!|i>77_&P@3Klbbj{fO7hk2EEegumM%NRMX8d??Ax~$x=<$;0`wWy*4F#B_V%vL zK3mrJ0s@)`96T4@H>6ZJ6D!~(h^3~cPFIemf0za4xB);jFc=o<@=7t_=(AHzAqx7p z>QHI#^%G?q_^RpY&J-CX<*ED5La&Xd)a1nT=g$d~zwFiwJDO>tl{^TVwm!4SOSn$? zJ509(==^)^kVeXDjF~5wN&7?3U%&eL_qe7_K8xoCpU~#bxSgvYY(Gs`gN#J>sO56; zRZ~SS8m77V`Sar`zl~DySGVb?Zf#)D8PqSWD&@_^ya|ARm`eSC zr5E=@(;~tPb=txdJJKb?D=9RVi+x#%8&F~z3JD4MWLG_UjdbSSTNDRcLs#btmtk^} zjfe;r7uSjR>CxUJNQ=7z=s}kD!3=|if4e5s1vxqAy5ePz95M|`vb<+Ih|<8Xg$3e2 zdq5LYw;EM+`#lUu5)X#LLD_!fo+7qfczV`iXmV_@(LYA>{cjT$_A_U!y0 zgM*B?TQ!b#^bHLbkMwL|D7-;eSXQ>)F5|b4p9qW^=jPckcwI9K>3B zt5#+B21@VOUYct3zOIHN&F+)x2#%+a&fmvtD|Xc#P5$+tol$k9VQg=)BS4LhKHWyw zq*W`nPo7a1<7t(*_y(#T^!)hgR3m5@j71g>uM|Q43$O*(!|ELgrl9s(<2rqTrcKCu zeWG@5ZtnR=xrJPx8Mt*LB#T{%m-^PL$^p;Jdzf~CpFF_9Ib`#_Ot`c9pqMLRxp}j% zmZha-tr~7-W~Q+tcck>bL^B=_35q7q?Uxldd3hZcHLo|@I;5VgCrzf45kl}t?er3s z?6|P7up9<-_w>wXq^C=nA)jp4-3J#v42 zvZtKDXT@W^IF6Pg(CDtD^5E`9dU|@(cgp98x<|BlPuwzmfLmarX(w)f(tDbSbQ4*K zH6z91!2;~XcZ8voI8X;Wl%9{zVGGG6A3U~5f4h=JX#CHf@9z|T!|&QTvNHP+^?b=@ zzQ2BfMkh}3h~GpjRgEi+M?14F5Ypu|5r+SPU19kpJu#iQ=-R+&MvS!1UcA0 zx$qHf*64K&{Kzv()p`bL-IWv#3k3K9i}4gH2zaE~k^yOj*V@%!wv+>h|4D~o zgD)Uo&brbn%Wu;EUV2&L)p(kfl$12r=P$eX2m$c(3%`F~dn`d;$e%+}w$_dEN(E5h zqFvte-Lp798c}`p=uyE%LPYL_|0WAXAni>uxjVNQd|ez|XT9U5Z=#&uy?YmvsKzx1 z6+g~;(+f1%4FL4<wS)yihkq z)&?ug77nKUQTv=M57;&%kw~PNnAXT+){&1w6zmUT?EHoTdHZ^9ZV;Yky?q#^`}Q6m z9pOwC!!srJuh}x=4OX4Rg%k%0dU|>z1h^lF?}LLnk3N_rez9f7kNkPMwSYF{`bE&@ zWNcDTq&>K8bdBcUP5})_-@XG&=K<*4QuPIoRL*374NOFLj!nwtFI z7i3M-b&(sYu&wC2!~1ZOnVHFMHc4eyGBBXq6T>8Wn?>A7*R08B%iFNb^zWzNJ!!Fa z)l38f9SV#9ug_-#rBqSXg2CASFJDIgDS0|17g&7^%15IA0mEdToOs|oVPf%mYk24h z&sQ}8`egd(@RH)7G%tdp+ zQ3eg7$6N)vVTg&Jg`cBuiTY|L)x? 
z57fMsXNVw9^OIr&q3uE*U2$wW`Vd_i@X}HyWD!vE)Qi^ERujRrF7ba|J0@nL-f6UI zc)ZrNyu7gR@B_fr|0H%WMq=pHN`~X$h4?-N+kta!$pO<1=H7m}-p(MsgB&Kn2&J!8d)vzBg2>*YR}%4 zl?C$3$+2%EnmB2Zr39ESM6&~EGLEmP@di(RGXn#0*d?6|%+04LKA4;i7vC%S;WAqF zh6(p*q%4>kx{{r!h*PFEVn7P`&;Zz2L%IwS-PR}St>Bi#Lu#jGSfpJt8E&v(~4-W}hva^lx4%<)I4l=FOYx?VTNaLLwq7Kp49!2;j$>V@eIA zFYxUI69RxmO6ucxuD70SSA~TUPNx7~*0BtIj1|daR+2k-?|b1`@Z!abuYA0`b>yG| z0U4bS3%*ewKgEYQIX#`n5*3%0vXRhW`1n@tCaMjE0m#NCI>O1*K7Ib2y=P4K-#9=w z$j_b&^wglRnlv*|d(kn6=IwaQ%+v*(%4;SP7hOW27Yx0UdN`mDD1k{2sY))XgInGn z5tnCpkn5VXw?0QM`H)p|oqlivbrWnuObRUcrlu1^hzl40N%YO(Lb}0iWkv6)2cpltE||r@5^{!=+C?J%vl>1BtE1w-Q-WsPM%Idm*3!>Q6+u7 zeB)grN1b~ptGY;!aTNb@?)(@Tl2%)wFPDo;y{4XK6$*Rmq_`MM!y9r$bgeQOPXVB5 z`=micTK1n_jNaUM4yQ4uP{ zLRh*^4$~$jxs4asgPN z_5ov*E19&{Z>!S^sBJk5><&oA3@vPgtJ8D@Zxo>zA`}&GRKt@04Rx5`X#emI*U*p< z{H2v8*Va=nQ!EziuA=g}41!u?F=j1oXXiDgW^-3EDk=+j2|W9*I`ph|Te@!)TZ z?V1@G8O^{ErkX9~xsnF)!q>@Y@;|(}UdnVCiL{>FPwzLjcs!(w)Svx>bcRmo2P1)f zBjlkNaBcbfR-=3*^f0gv@)9;mldjc|P_NlEwuUw?N|=+iOa7_{(+j;wnnj9Eu{*Rx zZw5GoVmDZRdO)~BTp_?8+D^9oyzd#EGbIfN&?|Z`EFw~6dL@~OwQ`}t1=+KN{#Z~5 zfzykg-+QpJHM&$9y@aEp5TZHV=XtlGq9|aRuJQwoXvQJa&v z1@xePNie(LC7Lq~2{1rn4RqnO-G-vG9!CnLh%c)z&%L{MFM6#E6%~<%hro7y&7fqM z+qb#p-&fsz;+!dgRQ#HOeH~wYBi}%>&g9(SR3WXoFW5%@5+*) zMZkkEy}iH36<<>6-oM{dPT1%cnr-pkA#tOOP%-lN;$jbMW|;D#qQq`n?<*B}JRbMY zE3AcWmqAfMVFki2FJfY1ddAq#?FHId>Bgp}{y~n8D?uKQAGai^aV7Zm*lWN!dJK^t zw=;wiPfHERNyWuz=GTkmwS(Py17cfL-7w`Z23)AmEF4~=Wcagv^D{j?4IMH5i_4zj zBh(?3{nzNDY4zUI_x<%>;^ks! zkWvM0G#}7I42vqA`IAphxm+XlJ)GJx%+;o<*t51 z?jxzz%isSJZERLfx&*z#M0MAUl{5!+f%o4ZgSM675+47f<%05v$jC=!!8X{+Z=! 
zNhlNV!Sf7%B5^JgiN@bk*e6E41zB~=hI{+HKb>hnxFTF?{V1xe zxOfg>$@sapGxt6-$vO~v{OiAz^47DX{T~334ww^sBmeA>Cab_g^Y%VKM}Mm9mpi|e zgC0Zz?tf!(8!92zIi{!`T9A(nCREeZykGC|EsqVRYkF0Lst_(4ms*w}YP*P6qiKjN zIPy-5lf7P7Cjr3Cc8|rr@CGasgwYc$Y;3)eisW2?7zT6@V1`HvGM1k|00chDqM(nz z%Uo!`X^y&y2!4ya1{V@*NdUb#(U&=7X_F{>)5R&Xwa8U4EV)SCA_88Ip^l#6zrQ;A z3dUA0xmJ(n$MJ86PTb>tLVj!ejVuT5!u}lxC#PMAfg2zPTZ{W1r3GoD+&R=9+3gtL zy0WxGcpc=~2E9!VCyAb_S=IPUY#wtTpC!^qDq@cJ=~j!! zyGwU|I)%DEg>Fh-ZftKQE=aIFyq7ljfucA2>Hc)f(cW?rE|;u7iqFJ0D!Z2 z8^C?wT-&!5WA8X(57@n_9_E|G?Yh9nIdaa;Lj`FceP zSCYe-D2P2B9G1?Y*z2yGLmm!~O=+MYCosJQg3)OfiU=Rta>0Sc_UHj8pWF%KpE!c1 zixtb4R&N4xG79YCPoO}DQU{HbE%q=L66&IJTNA=V9(YNv4ms{=rN3>zHt7`ffe`E# zxV2t?aXjN@_wxt_N!(y*p0ACR=eQa~JSwufz$TS1z{kf70n_2#pPGzoWGLz0Hj;dt zk-&n4HmibjPstsw{kF`a=ldUd-=;-JmWF?lD7Fka4B%9FE z*o#a+iJp#biw~Uypt{8kLpG!#yj3QpJ+Q%xWeKa1P+j_Z&8&fT$gICQ{gw}?C3#6Q zvYM#KL7P`mvB}eudhLD)K&KupB}Q(4&!{m(;;8+hveO9MxcWcdr6s!G(c%ZgSK0`C zV|rf!KofW78sXy(3mZ!=HXO^sdfNO&HpiGBCGRx*nJl zH_^WcjG%gH9jnr*8cOEzMEbj0i&t3*kwS2TkB$NbfN5GD0x<2r(`yDM%9dNf$$?iR z8^J&_0a3Uj0Z2d-+~Wvii)~WhjOn9WoN?{g=9c+;Y0}emQABX5r7UTV8q5$AT{hxH zpaE+a+>2B&IUK5e;GRN-1L(7`IEG^7zQ2_L4~7|=3o34Nz9&*oI(VUs0Q!8xzdAY; zj7VL~Q3P}-P)A8wj)au@0x>=kZi+IsZ}5yeE8F<0x61TQIY$fk46PdnY6> zFYkCjqk;ezK0}1O{PKY@1(qr{NC5?Yfh!5vu_9$ktbpioDcE&j}rha!mAs zp;VW^GQ0y1KUvo(Dwv%1c@!R{lha$K2v;ZklEFHfn#wdRrnnl_i(>&WYn-jk5DqHzaP zey-GyL&;=ddMwi!*YZn{Zsojhc!SkX)Zf4H)|VbFh@n2f;EIbn(h~Uk%)A~?d-38h z;Oy_BI=^9=ze)K>L2d)~0t)Y6N5-j=yA9;gl$?obwj!_e6HG5Bf58kjBt9@MsW$vi z)4Y7}lHy^YgCV1S5AB@0L;6|f+huPEn_coUtbtVN-^O(Xm>ASoQ;TYA<`t zLoV<>pL3b06UOJ|Sxa@fw+99W%6&XzA#fzS+?;0xFVRiA%CDtHn`!FOz{ZBIzTg6R308`2j4A>FGOQ06F=yW16W7z>29Skr2}0NHEgUEL?5>w76*-80%rE zLqNc>rC^j6Go@X}ON_UPjY2FL9q%2E8WF7+F<1hrAAvG~HT>Wbl=)g=67y?t+s&VE zMXy|0xadSsx6Vg_ji;K8lfj`u2<8G84^L1O!mU+n5&)L=bqV_e_NxGsiitu};Oql% zllbfpX(Mct^x!Aum~WcAF>N8)EFXItH8~;83~wCqUPErsVz3Ef#yD0*RQU4c%dT*U zkHfYVoeQ$kT2$g?419d*ctu1+cE(*>19m??yLefEz9)xJeG5(bTpwTSCX0>tyxSuI 
z^L4vuXmI2|lp;0=iKJxNpM~n{sU`OzgvRlQfB!yIP*Bh=(l0Ja5Q%Y|_M0tvGIsfc zUPI~47mB*4uU@EeSwYkP3si=3#WS2Q6_bB-pWu8!^TnN&7(eJ+c=39e0tc)=!w>`x zrn(R1VD_jVTK8LqtX*GTXuL7Mk}Q9(`T|j>=bVlA?>C3?>ngtcKgl0T^*qo6Ak`=e zjU&&H*bQ%=o_OH`+Sti^k-39yF~g3fJg7N>5KNz>#$`bO7$a`l)sDMs*s2mc8fHav zfu1U1)-JjMvF3e<-^QSzl=%9cclooK)nf7OnFc(k3H~-eUyL%EPz|t>&B|Z)`jZRO z;zeRwt+tg$#7wcD^Ps>K|G}iR&uP6Xy=3wz&@4donUVr(oa|H*)YBAn}cNZC%sUd;+4oIZ0Wtx2)zZEBTIuCY-?)I@4t_Jyj!k1ZECP~*dv`h=#`k(hggC`pyx5Mj-8zz8y+1W zPs7Ay(_mZkufc_$gQNq8Jz4 literal 0 HcmV?d00001 diff --git a/remote-frontend/public/logo_light.png b/remote-frontend/public/logo_light.png new file mode 100644 index 0000000000000000000000000000000000000000..639c5cd2359d2e3f5e43169ff3c4f507abd67887 GIT binary patch literal 15922 zcmdUWcU)6T*X~9XEQksg&`<=#a#VsKy(uc7APAyzq-g}Cmk3CYdK8f=C-~Hb2{&9cz&QFt_nc1^v&#d*VwVqi78JySJ zym9wN2!b~2XrD5KAf8AF;&`~83)CcC{R4bL8(g$akPvkE9P>X%ifVK^sN_HzYMq3z zO|Vh$W39~zy%P|W6Ta#DwRI4rO3*oV;^J+N@!n7Ft+idyz-8DH`6B9-=+dEw>7o@o zsxLwpQAQleYvgul9I`Ze>wZY%qKo{Faruo@pT^vRl-{Q*A61Bo$Qz%hy0Xr^^gJ_T zmpSXrP^`!hSJykV9}^j6V{Ck!gHwkxqj(32&uuULpvntnp{NhP9Vl%|u3BSfF}ME{*5FL@Erdn=wE3!$&Z zS_w3aLa91U5b;V=nOd(971_483B3sswN!1*@FE_F^)CP~5pZ1;b*$Lhdh!BuyH|_B z+8~M5Yv%X;2<0@T?O-lUtWW3ihy?tQ#=wHx?O+WP-%^S~#S z&}~1Kj`d0hs>zljY1G-p!w*Oojq`buf}tDZedGw0; zI=Fip#vd`Frs=@-WbE)P&YeeJebbC(Ur1zII>Vy+OI`pvFlRBucuYo(>+{f!zscAKv%kuyMmQ^V$^IYuIKoT3?m zc>Mkt>S&X^6_1{ReBc~SfRLT8Epca*AH%^z)eAvvTQos)<_PZFLyhZ9o<599--9E% zyiDlH+j8GagnT9Sdd|*UGjg>b{<7z9czeaK_E-Z4N0lu>647WY|6E;M9nq-ow*di- z*fr!g*J}Y;xpjdXf{I^pf(1bpXM!(*zfSS}vf-$c+NZApXsR5gY17b1t~nBG1|Og5}B= ziS%KaKWUukmsY;Y>WqE7$lk;e8T8%Ei%Sn#14&lYhT;8p=Fq)ctCxBb?=83m@S^WY zR)i6=g_Ptf{q?Ag=tmYWD~A(ki7C_PAo`!F)08lV+2sD-lu(?0f#+F?P9ilUBTnJP zthZU<+pVzcPn+uN6>=Mk555uUS7sb;zSqHHd8IPN-$PLA$zmt&%>mf3-NTOY!(Rn; zt3=WnQiF9L6)i6)G_=qbc{X|+Z&r=>cz#w0U*Rue$@_{IIRU?1IVyt^ylH4LCl=P= zEt7gZDaWhyvGy7|9hrp0`9bxbK@#CC4YmApY-LR?rT#^MB%IY*D)!{{ipoM^!>(__ z6Wgh?Cga)Z8Frn|`=XLA>NQ_gl9C#P*WYrIJHcB!W^>1@*#_dF$^uy&2eN3j4RaxP 
z5x;P7Lgf7mV$zNMfqgAH69L=sIq?|IHQsP%o`w_3a*ED{IhEn*>Q~q4HgD^yoQ-zz zRToxpE;vgWB4D5HiyXJv7HE>3%QJu;yVys#W?ub?B2UP9SatZM`lfv*)z@}DCqhN_ z-3kq2UvUPJr(xFH7I{*sdazWTArmM?AcJ^wS2ipcf@)nV4hCLX>*63qh6lqF?nT0s zoXdFpxW0TjZ?v#A(Dr0N?Jc3MrPqajk(F!9Oxp4XUjhltZDYb0d`rUJjSzmjwm%ny z;+FggK&Tw1F#c8rx+iT7zvt&0;;g>=d%ci*VyfFVd~@8zX!#|Er>{?f6J7CXi|FM_ zk5?@Y1vcs5`83`weid}=Up@wnR0#--kPC}^RQl}Xy64_SITu!3=GG-0TOw6lop01H z-;i{peuQL`HzSzP)nICxGEGo*zcf{ed{J)Q&cWlj&67yai)-{C-R3<|{~W0SDc&?( zU_>)Y^QcixYlf?KtJV=;%~W}USrJ&&4Ya29i1aD6CX8W(A=Uz5pRGCf+s|X}iYeSY9 zaS%!n2r0V`f5Rccfmb(*rSo+t=GWPeENO&mWwN>`{p{Py5CT>BE52Q#y3c;11n-n4 zsNM(q@$X;VE0kger$Buas6R_u3m0U`JPLN(ZXat*Fvxng^-=uNdX`;<@>1L&dX7&A z#Rj95O4Hj(qBqX28m@+@{c5a2nazg7k^XCj+(UlfAcWy1?mXXYg!os0$?l{xw?9+%KzG; zdtG&8lmg3&jxh#3U;bD~FWf6$2xXF)akgR8=kx08@L4}*13{MScban|pR03PUSckh z0Uy6I``K{5L|O30Ud*R!Nb6qYRC%ZbR3=9j%&ld}To{5vKD&aI{p&S}I`4{BDz6IMa%()W}%xZjVNr?@xzi`quGKvnbE{u*e}}?@;RfRDIejQi8pW>wH;%K+4>* z7=$|7*H^TYMouO(ilY*y#uG=qD<1MVE{W)itEX`cDd|y$14q=fjASzgLZVb94SV33FVi!#) zvoBvNU7%wP1pX8XB#xGkU!jRDGS|KL z8Q~gM1nZjREq_Ez-)~t2|D{W9xOr(@rKJ1}$(!WAIq_rlm!pGln-7j*nPL80vZo<2 zfp-9tpa+(go7g)5K^h*uKo;T2lIUQJ$X*?J{kw{`Yao=V?Bo6V>z{=iD$5%TG@v2nfSG{AMMZ$(xcqZe~+I3SIk)z%>c2s<08 zyYJ4;2=20-QG=lO(@HhCV9V)h+;*%qx2#CIIvp0N14d$-6EK~Ph346+vrK(NiT#is z)QWSt{lu!quqOVitH-R^e)50U{H6K)8iD)2>JcOycZyFS=(j4=$-E#5HbStT0R(sM zr$fnl%HV~S7SN8|2K_;{x7+jH(53Y!jJ#M8T@bj9(MD@H7ZtE*TpOY63?JKcA4CvtZkmP>uiC;VWnW2RNi5_fd2J+0`l? 
zW?kLirp0f;ee&Lx2Le4_b}ba?CVauh+#JP8XHH<^3WFN^0;P9yo}C!i?0QB}h^s@Y zk-eq!kdBLEt*OiWWLW(!+p~ym5ivyRFbkt%9u!Zq{c>8KVz8_1-UO>F6O7_u0RnD2 zQSQXfGYOpUi-|YIi2l(_YN1a)S1@BJR9RWqljq0NRW=tERI)<5UO|<>s3etU^0xg{deKhij(RqH|owgyW2L+1|&(glL^W5Y+zGsP}yP62|Z^Shq-9ryIj=3Nw+d5NM7M$Gw;hzSo6 z(x)}^xhaxHE_L}Z=s`2=8>1Vx8*}{aGufQV;V@d;36WY|y6xn*z6x@;Ql3TX^B~6Y z!f|K%^A-plEjdT(nJQNr(NyW~H<(9^HT$QHRLnJ3`#-}l+D=$pmTpijz@fgIICF|K zG2!!gMn(qZ?Y*nnq!`;7$q_Qbu9E5W<$T*N>6v;nVh-$~xxZdcro-%i=l~H{9p-tLIVXN@bd)zTi&)u6z zm4$TKgcn#mnE0@9@WhdxkjR#xyf#MT$o2V-N%?jDemLWL)XqjJz7jWQ?z~aJyF4Tn;v$PYuv>#OWr1< ztK|fPap4bYYYHP+c5haF+H$D@=}JlboQDXwVA zY8+4I`5|>zRBL-;+QCpO{92)FlMAQqmV+Vm^H!}TWykYmcySmegfdJXpd;Vmu13C$ z<(Q|SBlKN6a!iw_#Vq6cl9v|s#5GE%+M8nnt=XR=RTdxJ3Kfb*RFQ0`qXSz zW3iO|s14oqFvq~PMpZfSErgrm?w@t&+`}gWxLisV!!ci%XFgqV69I#7B=Di${vw%R z1s;il<*hSeq9^b+H@icS(Pgl7+XsOCY|v~$1_*t1?vL9uvnlbmb`Hp842s|nPiig5;cN& z+%*)8ecIyl{UDE1(CzrHMllGgdrMH0RvjPpva`{HKa_1(JBQIfAyPiv`O(KWR?c~i z^$Qz%hT`w7FDTFr{23*u?NOfLX?5WaPs zo)oB*fvNy=za;{dS>Rag{QfxLCVZFW7#{)x&O^CXZa$wLkXtU!@Gb8Q`1ri9 zkrr^-ranV6J}`pXKZ}6pRo-(*Q5%*mNyLj+OO^HcsTEVR$3ii2(8#b3 zacNxHj7F>>-JTnsufY-aC8XR`rRSPa>aiW;SZ~r;5r)ps7+Un_@XGKp=j1hU_7Lt} ze5LeMGPH3QhYV9t1wM8w=*GXNgt?ctb9N9a$#4qBEz5hHW@qF!bLgo&hZSHCRM5d& zT+wr9oMJjE02gSfKzLZ+FH$TFk-u#Z6tv?VIejP?%*PD4nP_^mKmUN7`{z9v4tSQl z>$mb5?36xlIio9?LrYg+bsw@ua>XgZQ=O1C}a%*h@l)yc%z7&uXQ>Ej2R%()=A)n!F< zB8fVC8h}`g)iVxA@Vy&WHMEg);5YZHw3S0f{HJ7tsr^S(p5>oF*;mWB&c6|~)yDtM zt^9Ai&i`kd4dOuXrt$n20`wwtv$IH4B}emAH@=01 zgDB}T9ly5%41A)ou^CtM>G-Dl`3boP^GflU%N#H>rSSf014w0==Eoqs}LSO26v9ePD;p>OBJ;DolKB?d3!r}JA4jf>*_8?>}BDQi!Ymbh5#i81lanQ)d4-X4+5Cc z=u}Ic?go4b<|E-<>iFx73`rEiC;q)LY<~2b`=P`&3e__mG;g1nnTvf~Uktit7A#!9 zBk2PQ$||R~LnVtcPwRcg#*wd`$zNxZ!uyR(dW>um5g(dtS~(!HJ!RCw^A+ioIsq+T zv=OQ!%$m~u;zlTkpU$5U(G&{4oL3_Fv()D{>Ek#d5 zlDRwh`LT;f*PvhG6mqL>74X)@>6`;DnP$rS zz*B#g24d5xNDwUzO}Ac83YSY-B86sOo^MqTdb}75SX!vWJN|v!k$(GvC9|~YgQ(}d zWwn!weRId>jSoq2qN8?ARZ?AAp8E@tZ-Tj97qrNxm<5f}>8ej_hN-EV3=g%JybwB& 
zQaIj!+*6^8QC!3@=Y&+A%*A!y%DNip^T}`4O#a5wJrMK>UMYR$TyD+{nRxR-s>vNr zg7{W5ch#8CAxqgzHB!edr#|_En{@SCWZIq9Ky_ymx#J&>&VT$0lV1llXe(z+`9moB zaWev?Hq^&eKdRFU=!nKHwHnsx3?M#-gn?hl^Y3gje%`n2r=7=1FyXl37qnVzlSUw2BM!!Bdv^7TsRXtcmQ#dAXv5GsAq zvm{l-?Fel*<}{&W{Dx8VK{+2lwZZkIjK0$)d7FnVmKd4lTg zek)>3?VPj$e{m@KI^n8d&zF~RCb(b(3dFR^f>qVdl^9p4{yEHAuDkGtsKzqrtGfp+Mr z`k+%|Xm-H)Ar(}QukKKGoj$)tnA?QR2rcbm(_J+cK})qR=_cp5fZs|R3iE3enhgk| z(T7eSvKU;-nRPGkb|7wQ<*52|9rB~Oj8MyD0^r^?oJx9#mb?9V=Ho=Q>PO_&`j2xh z&DN4gO}Qp*d#lQ zMriME<5YOznI3L?_q-fq;WSiSe2fbn*h)12%SSIFz5?;l3^8EFxRpUVdNG}r=}Dla zgO~-rCE$?_<%G()(XUoBj~8WvBqCy42?arWg0Zvmh?$8~k+k;vDc%#qG4!$CnbIz` zs<=G@Z&@G;p@OvKANObg*#!l);y75Iy~Oc^MUnu#p738k+{8FGV0lj>@}hCuc*n%w z04^Z>bKMY@*AMkf>cBlGV>r0?%<$`&qFEyWTP!@ryt0g})&1?;3QoaX3Ii-eT2yj6BsgWO}AEZ&s|sur8|~PZfHPWYhjg zC}kDg;35{x_ZB!uI>onPf7OgIZr4v$2MsWcMBpc{@Xp{>EJEcjNPyrF8ZgjeuigY2>d-Zzpo57 zt9psG3Cw{}JgQ;a03+``W7Ye7G5mzZ{!!J6hpxU^8Fo8d!doTvTZUyHkO~ND8|>BB zMZ~w-0Iu&{8j17<#&3qHLr&#%IBXFwu+1?!^U>8ge8D6HIgV+hy?d>G)$y#*!ss6P zrVmk#-^$K)+g4USQt39qb+$i_nPb>pf97gnad~Q%@jizLPxzyGIIc6{4|ksIP6uk; zIiYc~YNBZd_0_TVvO5o8 zFP5rtJ_1uqJ=1t@L)TN{{fYHnSXB}osnMD6^@@4|*>CjVH(Qxy={(6A^?FOWu*cdT zXa9O_v;gB7^);$}6SBN{`dV6J&2m3Sb8jGO;T57xuD6=>Ekvd4?2w*DMW5qFyr%uG zF~5x?&RGgRkKYHQ=;fi=!NyQs_-*8E8Um(==DhzrkK!r>;@=Q?=jJBC!*@-AuNIMg z8S}TxQ8}xd!Em|6r&_K>-F!MA;)QAlYcL`l- z427&@OQb~jvcw;*t_L;TJH=-y8;(#2NS?kiGaECKHnCpcXkp(7+0F<%>(}pTYv{$F z@a1n;#%{)Op|Mt1v=vlrJJ^k!p(*&{Z!u=nx%{3vZR`zdEE+`|3OKEvSyV~PyqBZr zIOx=w(`PpEnzydsnBdK)Gfk>4^dse52O(?P<3y#WxTNfw(Cl#O^A3YlSGY){bUp$_ zE%3@(2r2l!{p*E5|D9)Zb*;)5yb~32Cv5t;{Em3HPI#MkQF~3GEw-*?`>FA&bj^T{ z%gXxSw7!yhW*8n#0}l2ULZ6*p(UJSFVT-e4KB?X_l%1-);s(YD%0?jl&ZyUalS@$i zy3{r6Dx-f4d2b&;o!LXYZ_Kl${thjzSkk7^tzaffJ11OUafVUM>@ttp+&LQ8YEiac z*z^(iQB5%&i|aB@i}nxapn07MCqcf{JGLg#vRv9Zy(U+-e(c`rC?OQ=fl!M>c*l3y z@e>NyE42_$niy7=KIB|{*J{&x?anD@P5{`?mmhE+AL#Jic1tp$x4undp3-#p;G#@^ zpy%6r-96-j1F(&6V)<1MMCdOkZlbpMYvKs4Ie*PWhp!h>YjOTixi#FGpe>OO&fY%{TsPv) 
zS$|uA#+js?!)@fUEs$Vs47Zt($QO8s@ZJHn%vXi^O}wxt={hOpseX@pZ-xKXm+H0? zf>nvt)ubsj1iPqQJJgkPMJS>Wr{L0)4~zinx>~|`2W1I{pE9lfM7^#Mrb*g(SpNRs zDlBV&Qc+p8D_l%!yDm2TQ1VS?z7eW-g{J~L;KK}~J~Pe6_$q$~0JK~sY{@^=_4||e zCe4InPt|GZxJS!J6uhI(3Pq$zKJ8a-w~w#By{RdCO`V>PBmK2zxiwA*M??pU7%6U9 zyg6v^>oGwq^7%9`wO3Aj5U66W5ye{9g1dOP(g%bfUM>2qX&z!mU8&VxuG^FVP4s^~ zHu948jYfe-?{#+csgUxJ<>~+#V#CklrEIU?UJLY&UPYrf5RP~0$MqUQ_&r`bW zTSnW}<{#JNLg=KP{J6$;?|Vx$<|&4BzyJU+o*i9+z!SUuN@lC#Fx6-jAv}@uyRhZUrQJ;MNyu+3l}?BPx8L zyA4(g9puWYx?WRpNtL2OWloi5h;h1mV#MZ}u%UYA(FflII9qJSPdjiOuEtgAFosRE zB}Owxy#lbjK@z@>@-c5MnbYg8_KlHV<5z3${rb2LE$0;9($oQ#@}v&2NlKJg1}DU| zvK{ES6_a;s(5M>fSgA!7+p+x^=-Y!;lMPv(2m9LGxP=2UU{jaOQOBtgqUyj}qp%5U zA&ol{kzYcA3&#zqJj2R+FcBLT}iK_N+Px#Uz*6m#ohN{~D}Zv-r(q>pnWgHwVNK^{nXV&pU5111vP>uP%wu zf9V?x>}8vh5sAnrj_d%KJ%uMD5(gqrMdmV7CN$3TG%?2)eh~PD`Ms|rfive)3Oj3W z`kD)11EDWaMn&ba#@BEf@Q9z8@w*r2l1v>fzE)^4ya^SFAMMLRo$2x2 zV`h^b>iG&+)w?YUXn@rOti$vNdAV+P$8tfW8DZUQn0J~g0@vT-js)jhLkjI>okMdK zVY7c0)>yl16fa_h(mlt!^F2OSc+2;CIAO(zUS!i{ENmGBn$s=V(Mg=Jw{p{X$r;)W z0^@1vGcJ=T?PF4DuHSU#a}njiAc~K7eqYgL`BjflVn*fo+BSE3mn#~lr+7(KfSC~E zcFfwL-Lqnr2x7WmFPXX)w7phIJCMK!$en?sC1zboUL=%ns_d@QlHsJ{^C&U;aM&uE?4LI{aRXAjf0C zA8z#=aQS-WibB8TqR^*Jv{x-XuROhp_BHK92j$HT3nEs#QuXxx?_Ce=4@atq`c*n@mO4=>5N$$Rh15+yH=W)i)+xquM`d+3b z^yl*mT#D-YjDzN8B~X!H9}R(n-dz|4xq8K~M`h_F`*)j=wKuvHKzo>5I|+ zXdgd$ggGNEwiM%=eceEC4obwjRheb|{54HBP?=>b&?v=yk$?ba7vU9Xk-T<3NP?ZQ z1C%D>a59S&sq1+LUpgKs96!N=291ga8td*G%#cda?d7(Wp}-f{?<7jnR;0ef z)G41I9g@HdmVQJUtZG)D(F~@pt`=16AJUI+{HpcPy44_u-N)b|AHU9Ocjosa7U}b& zW;HlrS6E)p$N(xh_s_}wh;J-a*!Z=D#e;xF!~s#aGzVY`ipo!%W&V8fciMeJ7DdVY z#VSvLuGd5X8@p08YysvDr}8H8|F#zmZHG|KzagIG%E=Ypk~P8A`yrHr3h$pHAfLr7 z)_Ol?jmg=Gd5|~GN6u?8RuF;EuWmJ}oWsngw@Ry!bU~_7|3acbn)=mC#!K5)QS8%h>bjRJgs6aYqzf zqih7HwFCuInLEYJi5wc{z==<7V%S1wjXuZQtX=N*XDpkuQKK1KyS;+8+NebbKE}wu zYt-7;k z)m2$Y^UnVr8{f%v*=CPA_SLL!*TMUiX;X;Dfa#DOjavxdFi5aTU$!r~s9QCJ%UjqE z8Kin7IDW{_^|A)%dyQ8>xGs8sb8z0mJ}5~AWaM1H7D9O0lq;W%L-6G;!y^|UKtNj9KSApsD+QY0 
zv-3Mw2B`S~SUk3DB)h>pj%O9P#0oC_%ok|%?7Za2kzrE8v9A;t-k>o%c$U6SNb!%SX(n$EI0(Z>Bj%+ zM~$o?Dr+1bg61&}s1A6C>|w#DKp68!?*Cx%(HX`N)Lak#!hi_UmB0zqGFK||PYoQ^ z9NSD%rw12B(*2{x+D7Y%z@F+`ZL{F)#lCeCYPj>Zu}r0s?9&ZWfBWax)pv}^$hniQ z&Ms*W8TSk>QVRSp3kh~DU6q+WS}|9zJaId`yQH$GqfMAfnPMpRneQKa?Y-RP+j~lW z_6VJBsu^a7^i$TeNLb5rfUG8d4uhOIFg4ydzZ7lXVSFeXL7+YC^)nLE@c0T+4MXdl zgKa8{tqVLtTDt77DJOJSObU57ss!nh9sFvu@{l|+g?>naKvAe5u^@wC(WB1gWJZja z{h%l0Rq}`@1(B~!u<;_yV=&9--KAQF8!zP-gfAB5XFG~Z70%`Rne+FCS#`^u7uVN= zZ^B5sB&wx4Mu`;6^*Sssn=ZH5nRjN-%DdgDWwjx0@je^_JJBC#;QpWrpES1wdQ;q68q zBT*F@L%WIQPLsvGbBy8PO6MRO=cXVF3!-P}wZ-bqiUb^-%k@k^AXaF>TCdK!47szP zfJ?#3*LxkV>P1eJVkw@r>7H5fi>c;;{ZU92jHAO&&xSqDrTgUUDO+Q%(!0E(aj*T5 zL5*?1%F!(fFxXk=SWO3d6?KO0?U4J>nIzyu>Mo!P51GJs>4j*Ey%P>|)hr|1%VNUP z!X;5zAssf~(}=XL)(O+ZfIggN)Uhtn$x?_$cb!n5jsBp|FW5D^yiT}p8&+?>kOeu} zCZK&SbmQ}S1WLe#2fC+ph8~{nSXx|+(4spZS4Fg{xd*)r;lO&p>dht)oim zyGu>7@*Uf9%(A_kh;j1N(e z=I&=+f8*&RQpIA%hoe;22o$E-fV&go;A=P2v4D7q=$geZ^V7qXVmm?RMlXBXKb=H9l? zY(I8zj@ffy&&pI#ggg%DaO_DISXDfaSepb>Ut4U?HYm)e55U#ezdgJ?QdepnUIohn z&ntYJ?y@HKJ<$tc4ek*Y*K0{mI>UO;SC7}dbpvBUu-py6Jy2eK<$*VH;4N(Ijl%Qzp7O%Ua=Qf*Tmit6G7aTShlG7L@Q1F zE5=5j@@#X=jH0+jruf46bH9dp*Ez-MGA>h0)#tuw!en~PlNrU#3TG~vo15Y|cb{ys zq>(0f-!6LV5QG?bPO=Un;T=iQ@>}|RvS`L<)qHj|*PPh398hg5-MdUwOU!?qxR+$T zXXu0cOS{r<>h6;_5{Z`{Tkxdx+VAn|JXJx-dgAL5wigj)j-?!?5ZiESo+XjBc((>w zIqtqcY%4pr4hHv)#rmYVjbqv*g>GlJ7uU*(4(jb%RwsZ*oT)$zm_PERX!x8H!B`Lk zw*!4p+P)r2vo9rU8fi!x5g@}1hch^MKxhTv|Tm*9=(B0P?T-J~h^sfprEE}36C#FwJ(etaiLIN~T? 
z|@WYV}4N5#qFWc*dT(cz?tCx}yD(a-50EKYHq-`C}bIIGEOvY?o zCLBIO^dAo^6%WYt$6m;>nwtFERD9Qq2nlhs+{#TT{&#{yBV8YB*Cef%l!69iYmu!D z0!6UWrGQ5>$*6T>gO&YdF1si?Cw6{qd=*7e)mEn{6wKV5^8J^(<#a$8)g4p1|GA3@Wgj#g3q)pxw zS3g+c-`^uSDql*1?b@2({bT)i1Q;~>1b1R3{uXtiQ=$v zGB&&=u~Dmu(Yf8LAO%!DI!U*N-!s0CKpm_Tte<^e%60ulek0}d=P5Ji(gNQdJ8pF?u9 zI4u`?FkopJsz@P&YQkvyodH|Vw4Vt?mh_7O1M{{qUv-9AOcnnfgein@# zK<9-!Ss51Njw}R&P%i+Gt^lvkDnxB0t&Qq(Dr879s3_kQAcYa3N1c1tB4cHjN(&rH zW8_eV4kgcAMRvG>R9QpEgcR6hEG|;!SW8WSI`;A0!1X_?*IS* literal 0 HcmV?d00001 diff --git a/remote-frontend/public/review_fast_logo_dark.svg b/remote-frontend/public/review_fast_logo_dark.svg new file mode 100644 index 00000000..ed9cacab --- /dev/null +++ b/remote-frontend/public/review_fast_logo_dark.svg @@ -0,0 +1,3 @@ + + + diff --git a/remote-frontend/public/robots.txt b/remote-frontend/public/robots.txt new file mode 100644 index 00000000..1f53798b --- /dev/null +++ b/remote-frontend/public/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Disallow: / diff --git a/remote-frontend/src/App.tsx b/remote-frontend/src/App.tsx index dd42bc09..0ae0a8a0 100644 --- a/remote-frontend/src/App.tsx +++ b/remote-frontend/src/App.tsx @@ -3,10 +3,9 @@ function App() {

Vibe Kanban Remote

-

Frontend coming soon...

- ) + ); } -export default App +export default App; diff --git a/remote-frontend/src/AppRouter.tsx b/remote-frontend/src/AppRouter.tsx index c5a8e1fb..fdd3f013 100644 --- a/remote-frontend/src/AppRouter.tsx +++ b/remote-frontend/src/AppRouter.tsx @@ -1,28 +1,48 @@ -import { createBrowserRouter, RouterProvider } from 'react-router-dom' -import HomePage from './pages/HomePage' -import InvitationPage from './pages/InvitationPage' -import InvitationCompletePage from './pages/InvitationCompletePage' -import NotFoundPage from './pages/NotFoundPage' +import { createBrowserRouter, RouterProvider } from "react-router-dom"; +import HomePage from "./pages/HomePage"; +import InvitationPage from "./pages/InvitationPage"; +import InvitationCompletePage from "./pages/InvitationCompletePage"; +import ReviewPage from "./pages/ReviewPage"; +import AccountPage from "./pages/AccountPage"; +import AccountCompletePage from "./pages/AccountCompletePage"; +import OrganizationPage from "./pages/OrganizationPage"; +import NotFoundPage from "./pages/NotFoundPage"; const router = createBrowserRouter([ { - path: '/', + path: "/", element: , }, { - path: '/invitations/:token/accept', + path: "/review/:id", + element: , + }, + { + path: "/invitations/:token/accept", element: , }, { - path: '/invitations/:token/complete', + path: "/invitations/:token/complete", element: , }, { - path: '*', + path: "/account", + element: , + }, + { + path: "/account/complete", + element: , + }, + { + path: "/account/organizations/:orgId", + element: , + }, + { + path: "*", element: , }, -]) +]); export default function AppRouter() { - return + return ; } diff --git a/remote-frontend/src/api.ts b/remote-frontend/src/api.ts index 0050e1c1..4608adb9 100644 --- a/remote-frontend/src/api.ts +++ b/remote-frontend/src/api.ts @@ -1,5 +1,79 @@ +import type { ReviewResult } from "./types/review"; +import { + getAccessToken, + getRefreshToken, + storeTokens, + clearTokens, +} from "./auth"; + const API_BASE = 
import.meta.env.VITE_API_BASE_URL || ""; +// Types for account management +export type MemberRole = "ADMIN" | "MEMBER"; + +export type ProviderProfile = { + provider: string; + username: string | null; + display_name: string | null; + email: string | null; + avatar_url: string | null; +}; + +export type ProfileResponse = { + user_id: string; + username: string | null; + email: string; + providers: ProviderProfile[]; +}; + +export type Organization = { + id: string; + name: string; + slug: string; + is_personal: boolean; + created_at: string; + updated_at: string; +}; + +export type OrganizationWithRole = Organization & { + user_role: MemberRole; +}; + +export type OrganizationMemberWithProfile = { + user_id: string; + role: MemberRole; + joined_at: string; + first_name: string | null; + last_name: string | null; + username: string | null; + email: string | null; + avatar_url: string | null; +}; + +export type InvitationStatus = "PENDING" | "ACCEPTED" | "DECLINED" | "EXPIRED"; + +export type OrganizationInvitation = { + id: string; + organization_id: string; + invited_by_user_id: string | null; + email: string; + role: MemberRole; + status: InvitationStatus; + token: string; + created_at: string; + expires_at: string; +}; + +export type CreateOrganizationRequest = { + name: string; + slug: string; +}; + +export type GetOrganizationResponse = { + organization: Organization; + user_role: string; +}; + export type Invitation = { id: string; organization_slug: string; @@ -90,3 +164,380 @@ export async function acceptInvitation( } return res.json(); } + +export async function getReview(reviewId: string): Promise { + const res = await fetch(`${API_BASE}/v1/review/${reviewId}`); + if (!res.ok) { + if (res.status === 404) { + throw new Error("Review not found"); + } + throw new Error(`Failed to fetch review (${res.status})`); + } + return res.json(); +} + +export async function getFileContent( + reviewId: string, + fileHash: string, +): Promise { + const res = await 
fetch(`${API_BASE}/v1/review/${reviewId}/file/${fileHash}`); + if (!res.ok) { + throw new Error(`Failed to fetch file (${res.status})`); + } + return res.text(); +} + +export async function getDiff(reviewId: string): Promise { + const res = await fetch(`${API_BASE}/v1/review/${reviewId}/diff`); + if (!res.ok) { + if (res.status === 404) { + return ""; + } + throw new Error(`Failed to fetch diff (${res.status})`); + } + return res.text(); +} + +export interface ReviewMetadata { + gh_pr_url: string; + pr_title: string; +} + +export async function getReviewMetadata( + reviewId: string, +): Promise { + const res = await fetch(`${API_BASE}/v1/review/${reviewId}/metadata`); + if (!res.ok) { + throw new Error(`Failed to fetch review metadata (${res.status})`); + } + return res.json(); +} + +// Token refresh +export async function refreshTokens( + refreshToken: string, +): Promise<{ access_token: string; refresh_token: string }> { + const res = await fetch(`${API_BASE}/v1/tokens/refresh`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ refresh_token: refreshToken }), + }); + if (!res.ok) { + throw new Error(`Token refresh failed (${res.status})`); + } + return res.json(); +} + +// Authenticated fetch wrapper with automatic token refresh +let isRefreshing = false; +let refreshPromise: Promise | null = null; + +async function getValidAccessToken(): Promise { + const accessToken = getAccessToken(); + if (!accessToken) { + throw new Error("Not authenticated"); + } + return accessToken; +} + +async function handleTokenRefresh(): Promise { + if (isRefreshing && refreshPromise) { + return refreshPromise; + } + + const refreshToken = getRefreshToken(); + if (!refreshToken) { + clearTokens(); + throw new Error("No refresh token available"); + } + + isRefreshing = true; + refreshPromise = (async () => { + try { + const tokens = await refreshTokens(refreshToken); + storeTokens(tokens.access_token, tokens.refresh_token); + return 
tokens.access_token; + } catch { + clearTokens(); + throw new Error("Session expired"); + } finally { + isRefreshing = false; + refreshPromise = null; + } + })(); + + return refreshPromise; +} + +export async function authenticatedFetch( + url: string, + options: RequestInit = {}, +): Promise { + const accessToken = await getValidAccessToken(); + + const res = await fetch(url, { + ...options, + headers: { + ...options.headers, + Authorization: `Bearer ${accessToken}`, + }, + }); + + if (res.status === 401) { + // Try to refresh the token + const newAccessToken = await handleTokenRefresh(); + return fetch(url, { + ...options, + headers: { + ...options.headers, + Authorization: `Bearer ${newAccessToken}`, + }, + }); + } + + return res; +} + +// Profile APIs +export async function getProfile(): Promise { + const res = await authenticatedFetch(`${API_BASE}/v1/profile`); + if (!res.ok) { + throw new Error(`Failed to fetch profile (${res.status})`); + } + return res.json(); +} + +export async function logout(): Promise { + try { + await authenticatedFetch(`${API_BASE}/v1/oauth/logout`, { + method: "POST", + }); + } finally { + clearTokens(); + } +} + +// Organization APIs +export async function listOrganizations(): Promise { + const res = await authenticatedFetch(`${API_BASE}/v1/organizations`); + if (!res.ok) { + throw new Error(`Failed to fetch organizations (${res.status})`); + } + const data = await res.json(); + return data.organizations; +} + +export async function createOrganization( + data: CreateOrganizationRequest, +): Promise { + const res = await authenticatedFetch(`${API_BASE}/v1/organizations`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(data), + }); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to create organization (${res.status})`); + } + const result = await res.json(); + return result.organization; +} + +export async function 
getOrganization( + orgId: string, +): Promise { + const res = await authenticatedFetch(`${API_BASE}/v1/organizations/${orgId}`); + if (!res.ok) { + throw new Error(`Failed to fetch organization (${res.status})`); + } + return res.json(); +} + +export async function updateOrganization( + orgId: string, + name: string, +): Promise { + const res = await authenticatedFetch(`${API_BASE}/v1/organizations/${orgId}`, { + method: "PATCH", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ name }), + }); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to update organization (${res.status})`); + } + return res.json(); +} + +export async function deleteOrganization(orgId: string): Promise { + const res = await authenticatedFetch(`${API_BASE}/v1/organizations/${orgId}`, { + method: "DELETE", + }); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to delete organization (${res.status})`); + } +} + +// Organization Members APIs +export async function listMembers( + orgId: string, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/members`, + ); + if (!res.ok) { + throw new Error(`Failed to fetch members (${res.status})`); + } + const data = await res.json(); + return data.members; +} + +export async function removeMember( + orgId: string, + userId: string, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/members/${userId}`, + { method: "DELETE" }, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to remove member (${res.status})`); + } +} + +export async function updateMemberRole( + orgId: string, + userId: string, + role: MemberRole, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/members/${userId}/role`, + { + method: "PATCH", + 
headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ role }), + }, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to update member role (${res.status})`); + } +} + +// Invitation APIs +export async function listInvitations( + orgId: string, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/invitations`, + ); + if (!res.ok) { + throw new Error(`Failed to fetch invitations (${res.status})`); + } + const data = await res.json(); + return data.invitations; +} + +export async function createInvitation( + orgId: string, + email: string, + role: MemberRole, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/invitations`, + { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ email, role }), + }, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to create invitation (${res.status})`); + } + const data = await res.json(); + return data.invitation; +} + +export async function revokeInvitation( + orgId: string, + invitationId: string, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/invitations/revoke`, + { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ invitation_id: invitationId }), + }, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.message || `Failed to revoke invitation (${res.status})`); + } +} + +// GitHub App Integration Types +export type GitHubAppInstallation = { + id: string; + github_installation_id: number; + github_account_login: string; + github_account_type: "Organization" | "User"; + repository_selection: "all" | "selected"; + suspended_at: string | null; + created_at: string; +}; + +export type GitHubAppRepository = { + id: string; 
+ github_repo_id: number; + repo_full_name: string; +}; + +export type GitHubAppStatus = { + installed: boolean; + installation: GitHubAppInstallation | null; + repositories: GitHubAppRepository[]; +}; + +export type GitHubAppInstallUrlResponse = { + install_url: string; +}; + +// GitHub App Integration APIs +export async function getGitHubAppInstallUrl( + orgId: string, +): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/github-app/install-url`, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.error || `Failed to get install URL (${res.status})`); + } + return res.json(); +} + +export async function getGitHubAppStatus(orgId: string): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/github-app/status`, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.error || `Failed to get GitHub App status (${res.status})`); + } + return res.json(); +} + +export async function disconnectGitHubApp(orgId: string): Promise { + const res = await authenticatedFetch( + `${API_BASE}/v1/organizations/${orgId}/github-app`, + { method: "DELETE" }, + ); + if (!res.ok) { + const error = await res.json().catch(() => ({})); + throw new Error(error.error || `Failed to disconnect GitHub App (${res.status})`); + } +} diff --git a/remote-frontend/src/auth.ts b/remote-frontend/src/auth.ts new file mode 100644 index 00000000..17e04d51 --- /dev/null +++ b/remote-frontend/src/auth.ts @@ -0,0 +1,26 @@ +// Auth token storage utilities using localStorage for persistent sessions + +const ACCESS_TOKEN_KEY = "rf_access_token"; +const REFRESH_TOKEN_KEY = "rf_refresh_token"; + +export function storeTokens(accessToken: string, refreshToken: string): void { + localStorage.setItem(ACCESS_TOKEN_KEY, accessToken); + localStorage.setItem(REFRESH_TOKEN_KEY, refreshToken); +} + +export function getAccessToken(): string | null { + return 
localStorage.getItem(ACCESS_TOKEN_KEY); +} + +export function getRefreshToken(): string | null { + return localStorage.getItem(REFRESH_TOKEN_KEY); +} + +export function clearTokens(): void { + localStorage.removeItem(ACCESS_TOKEN_KEY); + localStorage.removeItem(REFRESH_TOKEN_KEY); +} + +export function isLoggedIn(): boolean { + return getAccessToken() !== null && getRefreshToken() !== null; +} diff --git a/remote-frontend/src/components/CodeFragmentCard.tsx b/remote-frontend/src/components/CodeFragmentCard.tsx new file mode 100644 index 00000000..52e20888 --- /dev/null +++ b/remote-frontend/src/components/CodeFragmentCard.tsx @@ -0,0 +1,299 @@ +import { useMemo, useState } from "react"; +import hljs from "highlight.js/lib/core"; +import javascript from "highlight.js/lib/languages/javascript"; +import typescript from "highlight.js/lib/languages/typescript"; +import python from "highlight.js/lib/languages/python"; +import rust from "highlight.js/lib/languages/rust"; +import go from "highlight.js/lib/languages/go"; +import java from "highlight.js/lib/languages/java"; +import css from "highlight.js/lib/languages/css"; +import json from "highlight.js/lib/languages/json"; +import xml from "highlight.js/lib/languages/xml"; +import bash from "highlight.js/lib/languages/bash"; +import sql from "highlight.js/lib/languages/sql"; +import yaml from "highlight.js/lib/languages/yaml"; +import markdown from "highlight.js/lib/languages/markdown"; +import cpp from "highlight.js/lib/languages/cpp"; +import csharp from "highlight.js/lib/languages/csharp"; +import ruby from "highlight.js/lib/languages/ruby"; +import swift from "highlight.js/lib/languages/swift"; +import kotlin from "highlight.js/lib/languages/kotlin"; +import type { CodeFragment } from "../types/review"; + +// Register languages +hljs.registerLanguage("javascript", javascript); +hljs.registerLanguage("typescript", typescript); +hljs.registerLanguage("python", python); +hljs.registerLanguage("rust", rust); 
+hljs.registerLanguage("go", go); +hljs.registerLanguage("java", java); +hljs.registerLanguage("css", css); +hljs.registerLanguage("json", json); +hljs.registerLanguage("xml", xml); +hljs.registerLanguage("bash", bash); +hljs.registerLanguage("sql", sql); +hljs.registerLanguage("yaml", yaml); +hljs.registerLanguage("markdown", markdown); +hljs.registerLanguage("cpp", cpp); +hljs.registerLanguage("csharp", csharp); +hljs.registerLanguage("ruby", ruby); +hljs.registerLanguage("swift", swift); +hljs.registerLanguage("kotlin", kotlin); + +// Aliases +hljs.registerLanguage("js", javascript); +hljs.registerLanguage("ts", typescript); +hljs.registerLanguage("tsx", typescript); +hljs.registerLanguage("jsx", javascript); +hljs.registerLanguage("py", python); +hljs.registerLanguage("rs", rust); +hljs.registerLanguage("rb", ruby); +hljs.registerLanguage("sh", bash); +hljs.registerLanguage("html", xml); +hljs.registerLanguage("htm", xml); +hljs.registerLanguage("yml", yaml); +hljs.registerLanguage("cs", csharp); +hljs.registerLanguage("kt", kotlin); + +const extToLang: Record = { + js: "javascript", + mjs: "javascript", + cjs: "javascript", + ts: "typescript", + tsx: "typescript", + jsx: "javascript", + py: "python", + rs: "rust", + go: "go", + java: "java", + css: "css", + json: "json", + html: "xml", + htm: "xml", + xml: "xml", + sh: "bash", + bash: "bash", + sql: "sql", + yml: "yaml", + yaml: "yaml", + md: "markdown", + cpp: "cpp", + cc: "cpp", + c: "cpp", + h: "cpp", + cs: "csharp", + rb: "ruby", + swift: "swift", + kt: "kotlin", +}; + +function getLanguageFromPath(path: string): string { + const ext = path.split(".").pop()?.toLowerCase() || ""; + return extToLang[ext] || "plaintext"; +} + +type ViewMode = "fragment" | "file"; + +interface CodeFragmentCardProps { + fragment: CodeFragment; + fileContent?: string; + isLoading?: boolean; + unchangedRegion?: boolean; + hideHeader?: boolean; +} + +export function CodeFragmentCard({ + fragment, + fileContent, + isLoading, + 
unchangedRegion, + hideHeader, +}: CodeFragmentCardProps) { + const { file, start_line, end_line, message } = fragment; + const [viewMode, setViewMode] = useState("fragment"); + const lang = getLanguageFromPath(file); + + const highlightedLines = useMemo(() => { + if (!fileContent) return null; + + if (viewMode === "fragment") { + return getHighlightedLines(fileContent, start_line, end_line, lang); + } else { + // Full file view + const allLines = fileContent.split(/\r?\n/); + return getHighlightedLines(fileContent, 1, allLines.length, lang); + } + }, [fileContent, start_line, end_line, lang, viewMode]); + + const isInFragment = (lineNumber: number) => + lineNumber >= start_line && lineNumber <= end_line; + + return ( +
+ {/* Header */} + {!hideHeader && ( +
+
+
+ {file} + + Lines {start_line} + {end_line !== start_line && `–${end_line}`} + + {unchangedRegion && ( + + Unchanged + + )} +
+
+ {fileContent && ( + + )} +
+
+ {message && ( +
+ + + + {message} +
+ )} +
+ )} + + {/* Code Content */} + {isLoading ? ( +
+
+ Loading... +
+ ) : highlightedLines ? ( +
+ + + {highlightedLines.map(({ lineNumber, html }) => ( + + + + ))} + +
+ {lineNumber} + +
+
+ ) : ( +
+ File content unavailable for this fragment. +
+ )} +
+ ); +} + +function getHighlightedLines( + content: string, + startLine: number, + endLine: number, + lang: string, +): { lineNumber: number; html: string }[] { + const allLines = content.split(/\r?\n/); + const s = Math.max(1, startLine); + const e = Math.min(allLines.length, endLine); + const result: { lineNumber: number; html: string }[] = []; + + for (let i = s; i <= e; i++) { + const line = allLines[i - 1] || ""; + let html: string; + + try { + if (lang !== "plaintext" && hljs.getLanguage(lang)) { + html = hljs.highlight(line, { + language: lang, + ignoreIllegals: true, + }).value; + } else { + html = escapeHtml(line); + } + } catch { + html = escapeHtml(line); + } + + result.push({ lineNumber: i, html }); + } + + return result; +} + +function escapeHtml(text: string): string { + return text + .replace(/&/g, "&") + .replace(//g, ">"); +} diff --git a/remote-frontend/src/components/MarkdownRenderer.tsx b/remote-frontend/src/components/MarkdownRenderer.tsx new file mode 100644 index 00000000..9070e507 --- /dev/null +++ b/remote-frontend/src/components/MarkdownRenderer.tsx @@ -0,0 +1,84 @@ +interface MarkdownRendererProps { + content: string; + className?: string; +} + +export function MarkdownRenderer({ + content, + className = "", +}: MarkdownRendererProps) { + return ( +
*:first-child]:mt-0 ${className}`} + dangerouslySetInnerHTML={{ __html: parseMarkdown(content) }} + /> + ); +} + +function parseMarkdown(text: string): string { + if (!text) return ""; + + let html = text + .replace(/&/g, "&") + .replace(//g, ">"); + + // Code blocks (must be before inline code) + html = html.replace(/```(\w*)\n([\s\S]*?)```/g, (_, _lang, code) => { + return `
${code.trim()}
`; + }); + + // Headings + html = html.replace( + /^### (.+)$/gm, + '

$1

', + ); + html = html.replace( + /^## (.+)$/gm, + '

$1

', + ); + html = html.replace( + /^# (.+)$/gm, + '

$1

', + ); + + // Bold and italic + html = html.replace( + /\*\*(.+?)\*\*/g, + '$1', + ); + html = html.replace(/\*(.+?)\*/g, '$1'); + + // Inline code + html = html.replace( + /`([^`]+)`/g, + '$1', + ); + + // Links + html = html.replace( + /\[([^\]]+)\]\(([^)]+)\)/g, + '$1', + ); + + // Lists + html = html.replace(/^- (.+)$/gm, '
  • $1
  • '); + html = html.replace( + /(]*>.*<\/li>\n?)+/g, + '
      $&
    ', + ); + + // Paragraphs - wrap lines that aren't already wrapped in tags + html = html.replace( + /^(?!<[huplo]|$1

    ', + ); + + // Clean up empty paragraphs + html = html.replace(/

    <\/p>/g, ""); + + // Line breaks within paragraphs + html = html.replace(/\n(?!<)/g, "
    "); + + return html; +} diff --git a/remote-frontend/src/index.css b/remote-frontend/src/index.css index b5c61c95..ed6a15bd 100644 --- a/remote-frontend/src/index.css +++ b/remote-frontend/src/index.css @@ -1,3 +1,100 @@ +@import url("https://fonts.googleapis.com/css2?family=Chivo+Mono:ital,wght@0,100..900;1,100..900&family=Inter:wght@400;500;600;700&family=JetBrains+Mono:wght@400;500;600&family=Noto+Emoji:wght@300..700&display=swap"); + @tailwind base; @tailwind components; @tailwind utilities; + +@layer base { + :root { + color-scheme: dark; + --background: 0 0% 12%; + /* #1E1E1E */ + --foreground: 0 0% 90%; + --primary: 0 0% 13%; + /* #212121 */ + --primary-foreground: 0 0% 90%; + --secondary: 0 0% 13%; + /* #212121 */ + --secondary-foreground: 0 0% 70%; + --muted: 0 0% 19%; + /* #212121 */ + --muted-foreground: 0 0% 65%; + --accent: 0 0% 16%; + /* #292929 */ + --accent-foreground: 0 0% 90%; + --destructive: 0 45% 55%; + --destructive-foreground: 0 0% 90%; + --border: 0 0% 16%; + /* #292929 */ + --input: 0 0% 16%; + /* #292929 */ + --ring: 0 0% 50%; + --radius: 0.5rem; + + /* Syntax highlighting (dark) */ + --syntax-keyword: #ff7b72; + --syntax-function: #d2a8ff; + --syntax-constant: #79c0ff; + --syntax-string: #a5d6ff; + --syntax-variable: #ffa657; + --syntax-comment: #8b949e; + --syntax-tag: #7ee787; + --syntax-punctuation: #c9d1d9; + } +} + +@layer base { + * { + @apply border-border; + } + + html, + body, + #root { + @apply min-h-screen; + } + + body { + @apply bg-background text-foreground font-sans; + } +} + +/* Syntax highlighting classes */ +.hljs-keyword, +.hljs-type { + color: var(--syntax-keyword); +} + +.hljs-title, +.hljs-title.function_ { + color: var(--syntax-function); +} + +.hljs-attr, +.hljs-number, +.hljs-literal { + color: var(--syntax-constant); +} + +.hljs-string { + color: var(--syntax-string); +} + +.hljs-built_in, +.hljs-symbol { + color: var(--syntax-variable); +} + +.hljs-comment { + color: var(--syntax-comment); +} + +.hljs-name, 
+.hljs-tag { + color: var(--syntax-tag); +} + +.hljs-punctuation, +.hljs-property { + color: var(--syntax-punctuation); +} diff --git a/remote-frontend/src/lib/diff-parser.ts b/remote-frontend/src/lib/diff-parser.ts new file mode 100644 index 00000000..c2456c3c --- /dev/null +++ b/remote-frontend/src/lib/diff-parser.ts @@ -0,0 +1,281 @@ +export interface ParsedHunk { + header: string; + oldStart: number; + oldLines: number; + newStart: number; + newLines: number; + lines: string[]; +} + +export interface ParsedFileDiff { + oldPath: string; + newPath: string; + hunks: ParsedHunk[]; + rawDiff: string; +} + +export function parseUnifiedDiff(diffText: string): ParsedFileDiff[] { + const files: ParsedFileDiff[] = []; + const lines = diffText.split("\n"); + + let currentFile: ParsedFileDiff | null = null; + let currentHunk: ParsedHunk | null = null; + let fileStartIdx = 0; + + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + + if (line.startsWith("diff --git")) { + if (currentHunk && currentFile) currentFile.hunks.push(currentHunk); + if (currentFile) { + currentFile.rawDiff = lines.slice(fileStartIdx, i).join("\n"); + files.push(currentFile); + } + currentFile = { oldPath: "", newPath: "", hunks: [], rawDiff: "" }; + currentHunk = null; + fileStartIdx = i; + } else if (line.startsWith("--- ")) { + if (currentFile) { + currentFile.oldPath = line.slice(4).replace(/^a\//, ""); + } + } else if (line.startsWith("+++ ")) { + if (currentFile) { + currentFile.newPath = line.slice(4).replace(/^b\//, ""); + } + } else if (line.startsWith("@@")) { + const match = line.match(/@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@(.*)?/); + if (match && currentFile) { + if (currentHunk) currentFile.hunks.push(currentHunk); + currentHunk = { + header: line, + oldStart: parseInt(match[1], 10), + oldLines: match[2] ? parseInt(match[2], 10) : 1, + newStart: parseInt(match[3], 10), + newLines: match[4] ? 
parseInt(match[4], 10) : 1, + lines: [], + }; + } + } else if ( + currentHunk && + (line.startsWith("+") || line.startsWith("-") || line.startsWith(" ")) + ) { + currentHunk.lines.push(line); + } + } + + if (currentHunk && currentFile) currentFile.hunks.push(currentHunk); + if (currentFile) { + currentFile.rawDiff = lines.slice(fileStartIdx).join("\n"); + files.push(currentFile); + } + + return files; +} + +export function getFileDiff( + parsedDiffs: ParsedFileDiff[], + filePath: string, +): ParsedFileDiff | undefined { + return parsedDiffs.find( + (f) => f.newPath === filePath || f.oldPath === filePath, + ); +} + +export function hunkOverlapsRange( + hunk: ParsedHunk, + startLine: number, + endLine: number, +): boolean { + const hunkEnd = hunk.newStart + hunk.newLines - 1; + return hunk.newStart <= endLine && hunkEnd >= startLine; +} + +export function filterHunksToRange( + fileDiff: ParsedFileDiff, + startLine: number, + endLine: number, + contextLines: number = 3, +): string { + const expandedStart = Math.max(1, startLine - contextLines); + const expandedEnd = endLine + contextLines; + + const relevantHunks = fileDiff.hunks.filter((h) => + hunkOverlapsRange(h, expandedStart, expandedEnd), + ); + + if (relevantHunks.length === 0) { + return ""; + } + + const diffLines: string[] = []; + diffLines.push(`--- a/${fileDiff.oldPath}`); + diffLines.push(`+++ b/${fileDiff.newPath}`); + + for (const hunk of relevantHunks) { + diffLines.push(hunk.header); + diffLines.push(...hunk.lines); + } + + return diffLines.join("\n"); +} + +export function buildFullFileDiff(fileDiff: ParsedFileDiff): string { + if (fileDiff.hunks.length === 0) { + return ""; + } + + const diffLines: string[] = []; + diffLines.push(`--- a/${fileDiff.oldPath}`); + diffLines.push(`+++ b/${fileDiff.newPath}`); + + for (const hunk of fileDiff.hunks) { + diffLines.push(hunk.header); + diffLines.push(...hunk.lines); + } + + return diffLines.join("\n") + "\n"; +} + +export interface HunkLineInfo { + 
newLineNumber: number | null; + oldLineNumber: number | null; + type: "add" | "delete" | "context"; + content: string; +} + +export function parseHunkLines(hunk: ParsedHunk): HunkLineInfo[] { + const result: HunkLineInfo[] = []; + let oldLine = hunk.oldStart; + let newLine = hunk.newStart; + + for (const line of hunk.lines) { + const prefix = line[0]; + const content = line.slice(1); + + if (prefix === "+") { + result.push({ + newLineNumber: newLine, + oldLineNumber: null, + type: "add", + content, + }); + newLine++; + } else if (prefix === "-") { + result.push({ + newLineNumber: null, + oldLineNumber: oldLine, + type: "delete", + content, + }); + oldLine++; + } else { + result.push({ + newLineNumber: newLine, + oldLineNumber: oldLine, + type: "context", + content, + }); + oldLine++; + newLine++; + } + } + + return result; +} + +export function synthesizeFragmentDiff( + fileDiff: ParsedFileDiff, + newFileContent: string, + startLine: number, + endLine: number, + contextLines: number = 3, +): string { + const newFileLines = newFileContent.split(/\r?\n/); + const expandedStart = Math.max(1, startLine - contextLines); + const expandedEnd = Math.min(newFileLines.length, endLine + contextLines); + + const relevantHunks = fileDiff.hunks.filter((h) => + hunkOverlapsRange(h, expandedStart, expandedEnd), + ); + + const changeMap = new Map< + number, + { type: "add" | "context"; deletionsBefore: string[] } + >(); + + for (let i = expandedStart; i <= expandedEnd; i++) { + changeMap.set(i, { type: "context", deletionsBefore: [] }); + } + + for (const hunk of relevantHunks) { + const lines = parseHunkLines(hunk); + let pendingDeletions: string[] = []; + + for (const line of lines) { + if (line.type === "delete") { + pendingDeletions.push(line.content); + } else { + const newLineNum = line.newLineNumber!; + if (newLineNum >= expandedStart && newLineNum <= expandedEnd) { + const existing = changeMap.get(newLineNum)!; + existing.deletionsBefore.push(...pendingDeletions); + if 
(line.type === "add") { + existing.type = "add"; + } + } + pendingDeletions = []; + } + } + + if (pendingDeletions.length > 0) { + const lastNewLine = Math.min( + hunk.newStart + hunk.newLines, + expandedEnd + 1, + ); + if (lastNewLine <= expandedEnd) { + const existing = changeMap.get(lastNewLine); + if (existing) { + existing.deletionsBefore.push(...pendingDeletions); + } + } + } + } + + const outputLines: string[] = []; + let oldLineCount = 0; + let newLineCount = 0; + + for (let i = expandedStart; i <= expandedEnd; i++) { + const info = changeMap.get(i)!; + const lineContent = newFileLines[i - 1] ?? ""; + + for (const del of info.deletionsBefore) { + outputLines.push(`-${del}`); + oldLineCount++; + } + + if (info.type === "add") { + outputLines.push(`+${lineContent}`); + newLineCount++; + } else { + outputLines.push(` ${lineContent}`); + oldLineCount++; + newLineCount++; + } + } + + const oldStart = expandedStart; + const header = `@@ -${oldStart},${oldLineCount} +${expandedStart},${newLineCount} @@`; + + if (outputLines.length === 0) { + return ""; + } + + const diffLines: string[] = []; + diffLines.push(`--- a/${fileDiff.oldPath}`); + diffLines.push(`+++ b/${fileDiff.newPath}`); + diffLines.push(header); + diffLines.push(...outputLines); + + return diffLines.join("\n") + "\n"; +} diff --git a/remote-frontend/src/lib/extToLanguage.ts b/remote-frontend/src/lib/extToLanguage.ts new file mode 100644 index 00000000..5fefba02 --- /dev/null +++ b/remote-frontend/src/lib/extToLanguage.ts @@ -0,0 +1,72 @@ +/** + * getHighlightLanguage(ext) + * Returns the Highlight.js language id (or null if not mapped). + * + * @param {string} ext – File extension with or without the leading dot. 
+ * @example + * getHighlightLanguage('.py'); // "python" + * getHighlightLanguage('tsx'); // "tsx" + */ +const extToLang: Record = { + // Web & scripting + js: "javascript", + mjs: "javascript", + cjs: "javascript", + ts: "typescript", + jsx: "jsx", + tsx: "tsx", + html: "xml", // Highlight.js groups HTML/XML + htm: "xml", + xml: "xml", + css: "css", + scss: "scss", + less: "less", + json: "json", + md: "markdown", + yml: "yaml", + yaml: "yaml", + sh: "bash", + bash: "bash", + zsh: "bash", + ps1: "powershell", + php: "php", + + // Classic compiled + c: "c", + h: "c", + cpp: "cpp", + cc: "cpp", + cxx: "cpp", + hpp: "cpp", + cs: "csharp", + java: "java", + kt: "kotlin", + scala: "scala", + go: "go", + rs: "rust", + swift: "swift", + dart: "dart", + + // Others & fun stuff + py: "python", + rb: "ruby", + pl: "perl", + lua: "lua", + r: "r", + sql: "sql", + tex: "latex", + toml: "ini", +}; + +/** + * Normalises the extension and looks it up. + */ +export function getHighlightLanguage(ext: string): string | null { + ext = ext.toLowerCase(); + return extToLang[ext] ?? null; +} + +export function getHighlightLanguageFromPath(path: string): string { + const ext = path.split(".").pop() ?? ""; + return getHighlightLanguage(ext) ?? 
"plaintext"; +} diff --git a/remote-frontend/src/lib/utils.ts b/remote-frontend/src/lib/utils.ts new file mode 100644 index 00000000..c6cfca20 --- /dev/null +++ b/remote-frontend/src/lib/utils.ts @@ -0,0 +1,42 @@ +import { type ClassValue, clsx } from "clsx"; +import { twMerge } from "tailwind-merge"; + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} + +const extToLang: Record = { + js: "javascript", + mjs: "javascript", + cjs: "javascript", + ts: "typescript", + jsx: "jsx", + tsx: "tsx", + html: "xml", + htm: "xml", + xml: "xml", + css: "css", + scss: "scss", + json: "json", + md: "markdown", + yml: "yaml", + yaml: "yaml", + sh: "bash", + bash: "bash", + py: "python", + rb: "ruby", + go: "go", + rs: "rust", + java: "java", + c: "c", + cpp: "cpp", + cs: "csharp", + swift: "swift", + kt: "kotlin", + sql: "sql", +}; + +export function getLanguageFromPath(path: string): string { + const ext = path.split(".").pop()?.toLowerCase() || ""; + return extToLang[ext] || "plaintext"; +} diff --git a/remote-frontend/src/main.tsx b/remote-frontend/src/main.tsx index 24c718a7..794062f2 100644 --- a/remote-frontend/src/main.tsx +++ b/remote-frontend/src/main.tsx @@ -1,10 +1,20 @@ -import React from 'react' -import ReactDOM from 'react-dom/client' -import AppRouter from './AppRouter.tsx' -import './index.css' +import React from "react"; +import ReactDOM from "react-dom/client"; +import posthog from "posthog-js"; +import { PostHogProvider } from "posthog-js/react"; +import AppRouter from "./AppRouter.tsx"; +import "./index.css"; -ReactDOM.createRoot(document.getElementById('root')!).render( +if (import.meta.env.VITE_PUBLIC_POSTHOG_KEY) { + posthog.init(import.meta.env.VITE_PUBLIC_POSTHOG_KEY, { + api_host: import.meta.env.VITE_PUBLIC_POSTHOG_HOST, + }); +} + +ReactDOM.createRoot(document.getElementById("root")!).render( - + + + , -) +); diff --git a/remote-frontend/src/pages/AccountCompletePage.tsx b/remote-frontend/src/pages/AccountCompletePage.tsx 
new file mode 100644 index 00000000..3e46d28d --- /dev/null +++ b/remote-frontend/src/pages/AccountCompletePage.tsx @@ -0,0 +1,149 @@ +import { useEffect, useMemo, useState } from "react"; +import { useLocation, useNavigate } from "react-router-dom"; +import { redeemOAuth } from "../api"; +import { storeTokens } from "../auth"; +import { retrieveVerifier, clearVerifier } from "../pkce"; + +export default function AccountCompletePage() { + const navigate = useNavigate(); + const { search } = useLocation(); + const qp = useMemo(() => new URLSearchParams(search), [search]); + const [error, setError] = useState(null); + const [success, setSuccess] = useState(false); + + const handoffId = qp.get("handoff_id"); + const appCode = qp.get("app_code"); + const oauthError = qp.get("error"); + + useEffect(() => { + const completeLogin = async () => { + if (oauthError) { + setError(`OAuth error: ${oauthError}`); + return; + } + + if (!handoffId || !appCode) { + return; + } + + try { + const verifier = retrieveVerifier(); + if (!verifier) { + setError("OAuth session lost. Please try again."); + return; + } + + const { access_token, refresh_token } = await redeemOAuth( + handoffId, + appCode, + verifier, + ); + + storeTokens(access_token, refresh_token); + clearVerifier(); + + setSuccess(true); + + // Redirect to account page after brief delay + setTimeout(() => { + navigate("/account", { replace: true }); + }, 1000); + } catch (e) { + setError(e instanceof Error ? 
e.message : "Failed to complete login"); + clearVerifier(); + } + }; + + completeLogin(); + }, [handoffId, appCode, oauthError, navigate]); + + if (error) { + return ( + navigate("/account", { replace: true })} + /> + ); + } + + if (success) { + return ( + + ); + } + + return ( + + ); +} + +function StatusCard({ + title, + body, + isError = false, + isSuccess = false, + showRetry = false, + onRetry, +}: { + title: string; + body: string; + isError?: boolean; + isSuccess?: boolean; + showRetry?: boolean; + onRetry?: () => void; +}) { + return ( +

    +
    +

    + {title} +

    +

    {body}

    + {isSuccess && ( +
    + + + + + Redirecting... +
    + )} + {showRetry && onRetry && ( + + )} +
    +
    + ); +} diff --git a/remote-frontend/src/pages/AccountPage.tsx b/remote-frontend/src/pages/AccountPage.tsx new file mode 100644 index 00000000..84efc183 --- /dev/null +++ b/remote-frontend/src/pages/AccountPage.tsx @@ -0,0 +1,357 @@ +import { useEffect, useState } from "react"; +import { Link, useNavigate } from "react-router-dom"; +import { isLoggedIn } from "../auth"; +import { + initOAuth, + getProfile, + logout, + listOrganizations, + createOrganization, + type OAuthProvider, + type ProfileResponse, + type OrganizationWithRole, +} from "../api"; +import { + generateVerifier, + generateChallenge, + storeVerifier, +} from "../pkce"; + +export default function AccountPage() { + const navigate = useNavigate(); + const [loading, setLoading] = useState(true); + const [authenticated, setAuthenticated] = useState(false); + const [profile, setProfile] = useState(null); + const [organizations, setOrganizations] = useState( + [], + ); + const [error, setError] = useState(null); + const [oauthLoading, setOauthLoading] = useState(false); + + // Create org form state + const [showCreateForm, setShowCreateForm] = useState(false); + const [newOrgName, setNewOrgName] = useState(""); + const [newOrgSlug, setNewOrgSlug] = useState(""); + const [createLoading, setCreateLoading] = useState(false); + const [createError, setCreateError] = useState(null); + + useEffect(() => { + if (isLoggedIn()) { + setAuthenticated(true); + loadData(); + } else { + setLoading(false); + } + }, []); + + async function loadData() { + try { + const [profileData, orgsData] = await Promise.all([ + getProfile(), + listOrganizations(), + ]); + setProfile(profileData); + setOrganizations(orgsData); + } catch (e) { + setError(e instanceof Error ? 
e.message : "Failed to load data"); + setAuthenticated(false); + } finally { + setLoading(false); + } + } + + const handleOAuthLogin = async (provider: OAuthProvider) => { + setOauthLoading(true); + try { + const verifier = generateVerifier(); + const challenge = await generateChallenge(verifier); + storeVerifier(verifier); + + const appBase = + import.meta.env.VITE_APP_BASE_URL || window.location.origin; + const returnTo = `${appBase}/account/complete`; + + const result = await initOAuth(provider, returnTo, challenge); + window.location.assign(result.authorize_url); + } catch (e) { + setError(e instanceof Error ? e.message : "OAuth init failed"); + setOauthLoading(false); + } + }; + + const handleLogout = async () => { + try { + await logout(); + setAuthenticated(false); + setProfile(null); + setOrganizations([]); + } catch (e) { + // Tokens already cleared in logout() + setAuthenticated(false); + } + }; + + const handleCreateOrg = async (e: React.FormEvent) => { + e.preventDefault(); + setCreateLoading(true); + setCreateError(null); + + try { + const org = await createOrganization({ + name: newOrgName.trim(), + slug: newOrgSlug.trim().toLowerCase(), + }); + navigate(`/account/organizations/${org.id}`); + } catch (err) { + setCreateError(err instanceof Error ? err.message : "Failed to create"); + } finally { + setCreateLoading(false); + } + }; + + const generateSlug = (name: string) => { + return name + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-|-$/g, "") + .slice(0, 63); + }; + + if (loading) { + return ; + } + + if (!authenticated) { + return ( +
    +
    +
    +

    Sign In

    +

    + Sign in to manage your account and organizations +

    +
    + + {error && ( +
    +

    {error}

    +
    + )} + +
    + handleOAuthLogin("github")} + disabled={oauthLoading} + /> + handleOAuthLogin("google")} + disabled={oauthLoading} + /> +
    +
    +
    + ); + } + + return ( +
    +
    + {/* Profile Card */} +
    +
    +
    + {profile?.providers[0]?.avatar_url && ( + Avatar + )} +
    +

    + {profile?.providers[0]?.display_name || + profile?.username || + "User"} +

    +

    {profile?.email}

    +
    +
    + +
    + + {profile && profile.providers.length > 0 && ( +
    +

    Connected accounts:

    +
    + {profile.providers.map((p) => ( + + {p.provider} + {p.username && ` (${p.username})`} + + ))} +
    +
    + )} +
    + + {/* Organizations Card */} +
    +
    +

    + Organizations +

    + +
    + + {showCreateForm && ( +
    +
    + + { + setNewOrgName(e.target.value); + if (!newOrgSlug || newOrgSlug === generateSlug(newOrgName)) { + setNewOrgSlug(generateSlug(e.target.value)); + } + }} + placeholder="My Organization" + className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent" + required + /> +
    +
    + + setNewOrgSlug(e.target.value.toLowerCase())} + placeholder="my-organization" + pattern="[a-z0-9\-_]+" + minLength={3} + maxLength={63} + className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent" + required + /> +

    + Only lowercase letters, numbers, hyphens, and underscores +

    +
    + {createError && ( +

    {createError}

    + )} + +
    + )} + + {organizations.length === 0 ? ( +

    + You don't belong to any organizations yet. +

    + ) : ( +
    + {organizations.map((org) => ( + +
    +

    {org.name}

    +

    @{org.slug}

    +
    +
    + {org.is_personal && ( + + Personal + + )} + + {org.user_role} + + + + +
    + + ))} +
    + )} +
    +
    +
    + ); +} + +function OAuthButton({ + label, + onClick, + disabled, +}: { + label: string; + onClick: () => void; + disabled?: boolean; +}) { + return ( + + ); +} + +function LoadingCard({ text }: { text: string }) { + return ( +
    +
    {text}
    +
    + ); +} diff --git a/remote-frontend/src/pages/HomePage.tsx b/remote-frontend/src/pages/HomePage.tsx index 85279ca5..4317a03c 100644 --- a/remote-frontend/src/pages/HomePage.tsx +++ b/remote-frontend/src/pages/HomePage.tsx @@ -18,5 +18,5 @@ export default function HomePage() {
    - ) + ); } diff --git a/remote-frontend/src/pages/InvitationCompletePage.tsx b/remote-frontend/src/pages/InvitationCompletePage.tsx index 914e7ccc..74a50fab 100644 --- a/remote-frontend/src/pages/InvitationCompletePage.tsx +++ b/remote-frontend/src/pages/InvitationCompletePage.tsx @@ -1,97 +1,95 @@ -import { useEffect, useMemo, useState } from 'react' -import { useLocation, useParams } from 'react-router-dom' -import { redeemOAuth, acceptInvitation } from '../api' +import { useEffect, useMemo, useState } from "react"; +import { useLocation, useParams } from "react-router-dom"; +import { redeemOAuth, acceptInvitation } from "../api"; import { retrieveVerifier, retrieveInvitationToken, clearVerifier, clearInvitationToken, -} from '../pkce' +} from "../pkce"; export default function InvitationCompletePage() { - const { token: urlToken } = useParams() - const { search } = useLocation() - const qp = useMemo(() => new URLSearchParams(search), [search]) - const [error, setError] = useState(null) - const [success, setSuccess] = useState(false) - const [orgSlug, setOrgSlug] = useState(null) + const { token: urlToken } = useParams(); + const { search } = useLocation(); + const qp = useMemo(() => new URLSearchParams(search), [search]); + const [error, setError] = useState(null); + const [success, setSuccess] = useState(false); + const [orgSlug, setOrgSlug] = useState(null); - const handoffId = qp.get('handoff_id') - const appCode = qp.get('app_code') - const oauthError = qp.get('error') + const handoffId = qp.get("handoff_id"); + const appCode = qp.get("app_code"); + const oauthError = qp.get("error"); useEffect(() => { const completeInvitation = async () => { if (oauthError) { - setError(`OAuth error: ${oauthError}`) - return + setError(`OAuth error: ${oauthError}`); + return; } if (!handoffId || !appCode) { - return + return; } try { - const verifier = retrieveVerifier() + const verifier = retrieveVerifier(); if (!verifier) { - setError('OAuth session lost. 
Please try again.') - return + setError("OAuth session lost. Please try again."); + return; } - const token = retrieveInvitationToken() || urlToken + const token = retrieveInvitationToken() || urlToken; if (!token) { - setError('Invitation token lost. Please try again.') - return + setError("Invitation token lost. Please try again."); + return; } const { access_token } = await redeemOAuth( handoffId, appCode, - verifier - ) + verifier, + ); - const result = await acceptInvitation(token, access_token) + const result = await acceptInvitation(token, access_token); - clearVerifier() - clearInvitationToken() + clearVerifier(); + clearInvitationToken(); - setSuccess(true) - setOrgSlug(result.organization_slug) + setSuccess(true); + setOrgSlug(result.organization_slug); const timer = setTimeout(() => { const appBase = - import.meta.env.VITE_APP_BASE_URL || window.location.origin - window.location.assign(`${appBase}`) - }, 2000) - return () => clearTimeout(timer) + import.meta.env.VITE_APP_BASE_URL || window.location.origin; + window.location.assign(`${appBase}`); + }, 2000); + return () => clearTimeout(timer); } catch (e) { - setError(e instanceof Error ? e.message : 'Failed to complete invitation') - clearVerifier() - clearInvitationToken() + setError( + e instanceof Error ? e.message : "Failed to complete invitation", + ); + clearVerifier(); + clearInvitationToken(); } - } + }; - completeInvitation() - }, [handoffId, appCode, oauthError, urlToken]) + completeInvitation(); + }, [handoffId, appCode, oauthError, urlToken]); if (error) { return ( - - ) + + ); } if (success) { return ( - ) + ); } return ( @@ -99,7 +97,7 @@ export default function InvitationCompletePage() { title="Completing invitation..." body="Processing OAuth callback..." 
/> - ) + ); } function StatusCard({ @@ -108,31 +106,29 @@ function StatusCard({ isError = false, isSuccess = false, }: { - title: string - body: string - isError?: boolean - isSuccess?: boolean + title: string; + body: string; + isError?: boolean; + isSuccess?: boolean; }) { return (

    {title}

    {body}

    {isSuccess && (
    - +
    - ) + ); } diff --git a/remote-frontend/src/pages/InvitationPage.tsx b/remote-frontend/src/pages/InvitationPage.tsx index 00dfa612..c7231102 100644 --- a/remote-frontend/src/pages/InvitationPage.tsx +++ b/remote-frontend/src/pages/InvitationPage.tsx @@ -1,62 +1,57 @@ -import { useEffect, useState } from 'react' -import { useParams } from 'react-router-dom' +import { useEffect, useState } from "react"; +import { useParams } from "react-router-dom"; import { getInvitation, initOAuth, type Invitation, type OAuthProvider, -} from '../api' +} from "../api"; import { generateVerifier, generateChallenge, storeVerifier, storeInvitationToken, -} from '../pkce' +} from "../pkce"; export default function InvitationPage() { - const { token = '' } = useParams() - const [data, setData] = useState(null) - const [error, setError] = useState(null) - const [loading, setLoading] = useState(false) + const { token = "" } = useParams(); + const [data, setData] = useState(null); + const [error, setError] = useState(null); + const [loading, setLoading] = useState(false); useEffect(() => { getInvitation(token) .then(setData) - .catch((e) => setError(e.message)) - }, [token]) + .catch((e) => setError(e.message)); + }, [token]); const handleOAuthLogin = async (provider: OAuthProvider) => { - setLoading(true) + setLoading(true); try { - const verifier = generateVerifier() - const challenge = await generateChallenge(verifier) + const verifier = generateVerifier(); + const challenge = await generateChallenge(verifier); - storeVerifier(verifier) - storeInvitationToken(token) + storeVerifier(verifier); + storeInvitationToken(token); const appBase = - import.meta.env.VITE_APP_BASE_URL || window.location.origin - const returnTo = `${appBase}/invitations/${token}/complete` + import.meta.env.VITE_APP_BASE_URL || window.location.origin; + const returnTo = `${appBase}/invitations/${token}/complete`; - const result = await initOAuth(provider, returnTo, challenge) - 
window.location.assign(result.authorize_url) + const result = await initOAuth(provider, returnTo, challenge); + window.location.assign(result.authorize_url); } catch (e) { - setError(e instanceof Error ? e.message : 'OAuth init failed') - setLoading(false) + setError(e instanceof Error ? e.message : "OAuth init failed"); + setLoading(false); } - } + }; if (error) { - return ( - - ) + return ; } if (!data) { - return + return ; } return ( @@ -90,18 +85,18 @@ export default function InvitationPage() {

    handleOAuthLogin('github')} + onClick={() => handleOAuthLogin("github")} disabled={loading} /> handleOAuthLogin('google')} + onClick={() => handleOAuthLogin("google")} disabled={loading} />
    - ) + ); } function OAuthButton({ @@ -109,9 +104,9 @@ function OAuthButton({ onClick, disabled, }: { - label: string - onClick: () => void - disabled?: boolean + label: string; + onClick: () => void; + disabled?: boolean; }) { return ( - ) + ); } function LoadingCard({ text }: { text: string }) { @@ -129,7 +124,7 @@ function LoadingCard({ text }: { text: string }) {
    {text}
    - ) + ); } function ErrorCard({ title, body }: { title: string; body?: string }) { @@ -140,5 +135,5 @@ function ErrorCard({ title, body }: { title: string; body?: string }) { {body &&

    {body}

    } - ) + ); } diff --git a/remote-frontend/src/pages/NotFoundPage.tsx b/remote-frontend/src/pages/NotFoundPage.tsx index 2cb51780..6d6db2ee 100644 --- a/remote-frontend/src/pages/NotFoundPage.tsx +++ b/remote-frontend/src/pages/NotFoundPage.tsx @@ -6,5 +6,5 @@ export default function NotFoundPage() {

    Page not found

    - ) + ); } diff --git a/remote-frontend/src/pages/OrganizationPage.tsx b/remote-frontend/src/pages/OrganizationPage.tsx new file mode 100644 index 00000000..c41a2e2e --- /dev/null +++ b/remote-frontend/src/pages/OrganizationPage.tsx @@ -0,0 +1,782 @@ +import { useEffect, useState } from "react"; +import { Link, useParams, useNavigate, useSearchParams } from "react-router-dom"; +import { isLoggedIn } from "../auth"; +import { + getOrganization, + updateOrganization, + deleteOrganization, + listMembers, + removeMember, + updateMemberRole, + listInvitations, + createInvitation, + revokeInvitation, + getProfile, + getGitHubAppStatus, + getGitHubAppInstallUrl, + disconnectGitHubApp, + type Organization, + type OrganizationMemberWithProfile, + type OrganizationInvitation, + type MemberRole, + type GitHubAppStatus, +} from "../api"; + +export default function OrganizationPage() { + const { orgId } = useParams<{ orgId: string }>(); + const navigate = useNavigate(); + const [searchParams, setSearchParams] = useSearchParams(); + + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [organization, setOrganization] = useState(null); + const [userRole, setUserRole] = useState(null); + const [members, setMembers] = useState([]); + const [invitations, setInvitations] = useState([]); + const [currentUserId, setCurrentUserId] = useState(null); + + // GitHub App state + const [githubAppStatus, setGithubAppStatus] = useState(null); + const [githubAppLoading, setGithubAppLoading] = useState(false); + const [githubAppError, setGithubAppError] = useState(null); + const [showGithubDisconnectConfirm, setShowGithubDisconnectConfirm] = useState(false); + const [githubAppSuccess, setGithubAppSuccess] = useState(null); + + // Edit name state + const [isEditingName, setIsEditingName] = useState(false); + const [editedName, setEditedName] = useState(""); + const [editNameError, setEditNameError] = useState(null); + const [editNameLoading, 
setEditNameLoading] = useState(false); + + // Delete state + const [showDeleteConfirm, setShowDeleteConfirm] = useState(false); + const [deleteLoading, setDeleteLoading] = useState(false); + + // Invite state + const [showInviteForm, setShowInviteForm] = useState(false); + const [inviteEmail, setInviteEmail] = useState(""); + const [inviteRole, setInviteRole] = useState("MEMBER"); + const [inviteLoading, setInviteLoading] = useState(false); + const [inviteError, setInviteError] = useState(null); + + // Action loading states + const [actionLoading, setActionLoading] = useState(null); + + const isAdmin = userRole === "ADMIN"; + + useEffect(() => { + if (!isLoggedIn()) { + navigate("/account", { replace: true }); + return; + } + + if (!orgId) return; + loadData(); + + // Check for GitHub App callback params + const githubAppResult = searchParams.get("github_app"); + const githubAppErrorParam = searchParams.get("github_app_error"); + + if (githubAppResult === "installed") { + setGithubAppSuccess("GitHub App installed successfully!"); + // Clear the query param + searchParams.delete("github_app"); + setSearchParams(searchParams, { replace: true }); + } + + if (githubAppErrorParam) { + setGithubAppError(githubAppErrorParam); + searchParams.delete("github_app_error"); + setSearchParams(searchParams, { replace: true }); + } + }, [orgId, navigate, searchParams, setSearchParams]); + + async function loadData() { + if (!orgId) return; + + try { + const [orgData, membersData, profile] = await Promise.all([ + getOrganization(orgId), + listMembers(orgId), + getProfile(), + ]); + + setOrganization(orgData.organization); + setUserRole(orgData.user_role); + setMembers(membersData); + setCurrentUserId(profile.user_id); + setEditedName(orgData.organization.name); + + // Load invitations if admin + if (orgData.user_role === "ADMIN") { + const invitationsData = await listInvitations(orgId); + setInvitations(invitationsData.filter((i) => i.status === "PENDING")); + } + + // Load GitHub 
App status for non-personal orgs + if (!orgData.organization.is_personal) { + try { + const ghStatus = await getGitHubAppStatus(orgId); + setGithubAppStatus(ghStatus); + } catch { + // GitHub App may not be configured on the server + setGithubAppStatus(null); + } + } + } catch (e) { + setError(e instanceof Error ? e.message : "Failed to load organization"); + } finally { + setLoading(false); + } + } + + const handleUpdateName = async (e: React.FormEvent) => { + e.preventDefault(); + if (!orgId || !editedName.trim()) return; + + setEditNameLoading(true); + setEditNameError(null); + + try { + const updated = await updateOrganization(orgId, editedName.trim()); + setOrganization(updated); + setIsEditingName(false); + } catch (e) { + setEditNameError(e instanceof Error ? e.message : "Failed to update"); + } finally { + setEditNameLoading(false); + } + }; + + const handleDelete = async () => { + if (!orgId) return; + + setDeleteLoading(true); + + try { + await deleteOrganization(orgId); + navigate("/account", { replace: true }); + } catch (e) { + setError(e instanceof Error ? e.message : "Failed to delete"); + setShowDeleteConfirm(false); + setDeleteLoading(false); + } + }; + + const handleRemoveMember = async (userId: string) => { + if (!orgId) return; + + setActionLoading(userId); + + try { + await removeMember(orgId, userId); + setMembers(members.filter((m) => m.user_id !== userId)); + } catch (e) { + setError(e instanceof Error ? e.message : "Failed to remove member"); + } finally { + setActionLoading(null); + } + }; + + const handleUpdateRole = async (userId: string, newRole: MemberRole) => { + if (!orgId) return; + + setActionLoading(userId); + + try { + await updateMemberRole(orgId, userId, newRole); + setMembers( + members.map((m) => (m.user_id === userId ? { ...m, role: newRole } : m)), + ); + } catch (e) { + setError(e instanceof Error ? 
e.message : "Failed to update role"); + } finally { + setActionLoading(null); + } + }; + + const handleInvite = async (e: React.FormEvent) => { + e.preventDefault(); + if (!orgId || !inviteEmail.trim()) return; + + setInviteLoading(true); + setInviteError(null); + + try { + const invitation = await createInvitation( + orgId, + inviteEmail.trim(), + inviteRole, + ); + setInvitations([...invitations, invitation]); + setInviteEmail(""); + setShowInviteForm(false); + } catch (e) { + setInviteError(e instanceof Error ? e.message : "Failed to send invite"); + } finally { + setInviteLoading(false); + } + }; + + const handleRevokeInvitation = async (invitationId: string) => { + if (!orgId) return; + + setActionLoading(invitationId); + + try { + await revokeInvitation(orgId, invitationId); + setInvitations(invitations.filter((i) => i.id !== invitationId)); + } catch (e) { + setError(e instanceof Error ? e.message : "Failed to revoke invitation"); + } finally { + setActionLoading(null); + } + }; + + const handleInstallGitHubApp = async () => { + if (!orgId) return; + + setGithubAppLoading(true); + setGithubAppError(null); + + try { + const { install_url } = await getGitHubAppInstallUrl(orgId); + // Redirect to GitHub to install the app + window.location.href = install_url; + } catch (e) { + setGithubAppError(e instanceof Error ? e.message : "Failed to start installation"); + setGithubAppLoading(false); + } + }; + + const handleDisconnectGitHubApp = async () => { + if (!orgId) return; + + setGithubAppLoading(true); + setGithubAppError(null); + + try { + await disconnectGitHubApp(orgId); + setGithubAppStatus({ installed: false, installation: null, repositories: [] }); + setShowGithubDisconnectConfirm(false); + setGithubAppSuccess("GitHub App disconnected"); + } catch (e) { + setGithubAppError(e instanceof Error ? e.message : "Failed to disconnect"); + } finally { + setGithubAppLoading(false); + } + }; + + if (loading) { + return ( +
    +
    Loading...
    +
    + ); + } + + if (error && !organization) { + return ( +
    +
    +

    Error

    +

    {error}

    + + ← Back to account + +
    +
    + ); + } + + return ( +
    +
    + {/* Back link */} + + + + + Back to account + + + {/* Error banner */} + {error && ( +
    +

    {error}

    + +
    + )} + + {/* Organization Details Card */} +
    +
    +
    + {isEditingName ? ( +
    + setEditedName(e.target.value)} + className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent text-lg font-bold" + autoFocus + /> + {editNameError && ( +

    {editNameError}

    + )} +
    + + +
    +
    + ) : ( +
    +

    + {organization?.name} +

    + {isAdmin && !organization?.is_personal && ( + + )} +
    + )} +

    @{organization?.slug}

    +
    +
    + {organization?.is_personal && ( + + Personal + + )} + + {userRole} + +
    +
    + + {/* Delete button (admin only, non-personal) */} + {isAdmin && !organization?.is_personal && ( +
    + {showDeleteConfirm ? ( +
    +

    + Are you sure you want to delete this organization? This + action cannot be undone. +

    +
    + + +
    +
    + ) : ( + + )} +
    + )} +
    + + {/* Members Card */} +
    +
    +

    Members

    + {isAdmin && !organization?.is_personal && ( + + )} +
    + + {/* Invite form */} + {showInviteForm && ( +
    +
    + + setInviteEmail(e.target.value)} + placeholder="user@example.com" + className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent" + required + /> +
    +
    + + +
    + {inviteError && ( +

    {inviteError}

    + )} + +
    + )} + + {/* Members list */} +
    + {members.map((member) => ( +
    +
    + {member.avatar_url ? ( + + ) : ( +
    + + {(member.first_name?.[0] || member.email?.[0] || "?").toUpperCase()} + +
    + )} +
    +

    + {member.first_name || member.username || member.email} + {member.user_id === currentUserId && ( + + {" "} + (you) + + )} +

    +

    {member.email}

    +
    +
    +
    + {isAdmin && !organization?.is_personal ? ( + <> + + {member.user_id !== currentUserId && ( + + )} + + ) : ( + + {member.role} + + )} +
    +
    + ))} +
    +
    + + {/* Pending Invitations Card (admin only) */} + {isAdmin && invitations.length > 0 && ( +
    +

    + Pending Invitations +

    +
    + {invitations.map((invitation) => ( +
    +
    +

    + {invitation.email} +

    +

    + Role: {invitation.role} · Expires{" "} + {new Date(invitation.expires_at).toLocaleDateString()} +

    +
    + +
    + ))} +
    +
    + )} + + {/* GitHub Integration Card (admin only, non-personal orgs) */} + {isAdmin && !organization?.is_personal && githubAppStatus !== null && ( +
    +

    + GitHub Integration +

    +

    + Connect a GitHub App to automatically track pull requests from your repositories. +

    + + {/* Success message */} + {githubAppSuccess && ( +
    +

    {githubAppSuccess}

    + +
    + )} + + {/* Error message */} + {githubAppError && ( +
    +

    {githubAppError}

    + +
    + )} + + {githubAppStatus.installed && githubAppStatus.installation ? ( + // Installed state +
    +
    +
    + + + + + @{githubAppStatus.installation.github_account_login} + + {githubAppStatus.installation.suspended_at && ( + + Suspended + + )} +
    + + Connected + +
    + +
    + {githubAppStatus.installation.repository_selection === "all" ? ( +

    All repositories are being monitored.

    + ) : ( +

    + {githubAppStatus.repositories.length} selected{" "} + {githubAppStatus.repositories.length === 1 + ? "repository" + : "repositories"}{" "} + being monitored. +

    + )} +
    + + {/* Repository list (if selected) */} + {githubAppStatus.installation.repository_selection === "selected" && + githubAppStatus.repositories.length > 0 && ( +
    +

    + Monitored repositories: +

    +
    + {githubAppStatus.repositories.slice(0, 5).map((repo) => ( +

    + {repo.repo_full_name} +

    + ))} + {githubAppStatus.repositories.length > 5 && ( +

    + +{githubAppStatus.repositories.length - 5} more +

    + )} +
    +
    + )} + + {/* Disconnect section */} + {showGithubDisconnectConfirm ? ( +
    +

    + Are you sure you want to disconnect the GitHub App? You will need + to reinstall it from GitHub to reconnect. +

    +
    + + +
    +
    + ) : ( + + )} +
    + ) : ( + // Not installed state +
    + +
    + )} +
    + )} +
    +
    + ); +} diff --git a/remote-frontend/src/pages/ReviewPage.tsx b/remote-frontend/src/pages/ReviewPage.tsx new file mode 100644 index 00000000..349b3799 --- /dev/null +++ b/remote-frontend/src/pages/ReviewPage.tsx @@ -0,0 +1,614 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { useParams } from "react-router-dom"; +import { DiffView, DiffModeEnum } from "@git-diff-view/react"; +import "@git-diff-view/react/styles/diff-view.css"; +import "../styles/diff-overrides.css"; +import { + getReview, + getFileContent, + getDiff, + getReviewMetadata, + type ReviewMetadata, +} from "../api"; +import type { ReviewResult, ReviewComment } from "../types/review"; +import { MarkdownRenderer } from "../components/MarkdownRenderer"; +import { + parseUnifiedDiff, + getFileDiff, + synthesizeFragmentDiff, + type ParsedFileDiff, +} from "../lib/diff-parser"; +import { getHighlightLanguageFromPath } from "../lib/extToLanguage"; +import { CodeFragmentCard } from "../components/CodeFragmentCard"; +import { cn } from "../lib/utils"; + +const DIFF_VIEW_MODE_KEY = "diff-view-mode"; + +function diffHasChanges(diffString: string): boolean { + return diffString.split("\n").some((line) => { + if (!line) return false; + if ( + line.startsWith("--- ") || + line.startsWith("+++ ") || + line.startsWith("@@") + ) + return false; + return line[0] === "+" || line[0] === "-"; + }); +} + +type FileCache = Map; + +export default function ReviewPage() { + const { id } = useParams<{ id: string }>(); + const [review, setReview] = useState(null); + const [metadata, setMetadata] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [fileCache, setFileCache] = useState(new Map()); + const [loadingFiles, setLoadingFiles] = useState>(new Set()); + const [scrollProgress, setScrollProgress] = useState(0); + const [diffText, setDiffText] = useState(""); + const [diffViewMode, setDiffViewMode] = useState(() => { + 
const saved = localStorage.getItem(DIFF_VIEW_MODE_KEY); + return saved === "split" ? DiffModeEnum.Split : DiffModeEnum.Unified; + }); + const fetchingFiles = useRef>(new Set()); + + const parsedDiffs = useMemo(() => parseUnifiedDiff(diffText), [diffText]); + + const handleViewModeChange = useCallback((mode: DiffModeEnum) => { + setDiffViewMode(mode); + localStorage.setItem( + DIFF_VIEW_MODE_KEY, + mode === DiffModeEnum.Split ? "split" : "unified", + ); + }, []); + + useEffect(() => { + if (!id) return; + // Skip refetch if we already have data for this review (e.g., during HMR) + if (review) return; + + setLoading(true); + setError(null); + + Promise.all([getReview(id), getDiff(id), getReviewMetadata(id)]) + .then(([reviewData, diffData, metadataData]) => { + setReview(reviewData); + setDiffText(diffData); + setMetadata(metadataData); + setLoading(false); + }) + .catch((err) => { + setError(err.message || "Failed to load review"); + setLoading(false); + }); + }, [id, review]); + + const pathToHash = useMemo(() => { + if (!review) return new Map(); + const map = new Map(); + for (const [hash, path] of Object.entries(review.fileHashMap)) { + map.set(path, hash); + } + return map; + }, [review]); + + const fetchFile = useCallback( + async (filePath: string) => { + if (!id || !review) return; + + const hash = pathToHash.get(filePath); + if (!hash) return; + if (fetchingFiles.current.has(filePath)) return; + + fetchingFiles.current.add(filePath); + setLoadingFiles((prev) => new Set(prev).add(filePath)); + + try { + const content = await getFileContent(id, hash); + setFileCache((prev) => new Map(prev).set(filePath, content)); + } catch (err) { + console.error(`Failed to fetch file ${filePath}:`, err); + } finally { + fetchingFiles.current.delete(filePath); + setLoadingFiles((prev) => { + const next = new Set(prev); + next.delete(filePath); + return next; + }); + } + }, + [id, review, pathToHash], + ); + + useEffect(() => { + if (!review) return; + + const allFiles = new 
Set(); + for (const comment of review.comments) { + for (const fragment of comment.fragments) { + allFiles.add(fragment.file); + } + } + + for (const filePath of allFiles) { + fetchFile(filePath); + } + }, [review, fetchFile]); + + useEffect(() => { + const handleScroll = () => { + const scrollTop = window.scrollY; + const docHeight = + document.documentElement.scrollHeight - window.innerHeight; + const progress = docHeight > 0 ? Math.min(1, scrollTop / docHeight) : 0; + setScrollProgress(progress); + }; + + window.addEventListener("scroll", handleScroll, { passive: true }); + handleScroll(); + return () => window.removeEventListener("scroll", handleScroll); + }, []); + + // Parse PR metadata from the GitHub URL + const prMetadata = useMemo(() => { + if (!metadata) { + return { org: "", repo: "", number: 0, title: "" }; + } + // Parse gh_pr_url: https://github.com/owner/repo/pull/123 + const match = metadata.gh_pr_url.match( + /github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/, + ); + if (match) { + return { + org: match[1], + repo: match[2], + number: parseInt(match[3], 10), + title: metadata.pr_title, + }; + } + return { org: "", repo: "", number: 0, title: metadata.pr_title }; + }, [metadata]); + + useEffect(() => { + if (review && prMetadata.title) { + document.title = `Review: ${prMetadata.title} · ${prMetadata.org}/${prMetadata.repo}#${prMetadata.number}`; + } + }, [review, prMetadata]); + + if (loading) { + return ( +
    +
    +
    +

    Loading review...

    +
    +
    + ); + } + + if (error || !review) { + return ( +
    +
    +
    + + + +
    +

    + {error || "Review not found"} +

    +

    + The review you're looking for doesn't exist or has been removed. +

    +
    +
    + ); + } + + const prUrl = + metadata?.gh_pr_url || + `https://github.com/${prMetadata.org}/${prMetadata.repo}/pull/${prMetadata.number}`; + const hasDiff = parsedDiffs.length > 0; + + return ( +
    + {/* Scroll Progress Bar */} +
    +
    +
    + + {/* Header - Two Column Layout - Full Height */} +
    +
    +
    + +
    +

    + {prMetadata.title} ( + + #{prMetadata.number} + + ) +

    +
    +
    + +
    +
    +
    +
    + + Logo + +

    + To make this PR easier to understand and review, an AI agent has + written a review story. The changes are presented in a + clear, logical order, with concise, AI-generated comments that + explain context and highlight what matters.{" "} + + Learn more. + +

    +

    Please scroll to begin

    +
    +
    +
    +
    + + {/* Comments List - Two Column Grid Layout */} + {review.comments.map((comment, idx) => ( + + ))} + + {/* Footer - Promotional */} +
    +
    +

    + Generate AI-powered code reviews for your pull requests +

    + + npx vibe-kanban review https://github.com/owner/repo/pull/123 + +
    +
    + + {/* Fixed Footer Toolbar */} +
    +
    + {/* Left: Logo */} + + review.fast + + + {/* Right: View Toggle */} +
    + View: + + +
    +
    +
    +
    + ); +} + +interface CommentStoryRowProps { + index: number; + totalComments: number; + comment: ReviewComment; + fileCache: FileCache; + loadingFiles: Set; + parsedDiffs: ParsedFileDiff[]; + hasDiff: boolean; + diffViewMode: DiffModeEnum; +} + +function CommentStoryRow({ + index, + totalComments, + comment, + fileCache, + loadingFiles, + parsedDiffs, + hasDiff, + diffViewMode, +}: CommentStoryRowProps) { + const hasComment = comment.comment && comment.comment.trim().length > 0; + + return ( +
    +
    +
    +
    + +
    + {index} + + /{totalComments} + +
    +
    + {hasComment ? ( + + ) : ( + + (No comment text) + + )} +
    +
    +
    + + {/* Right Column - Code Fragments */} +
    + {comment.fragments.length > 0 ? ( + comment.fragments.map((fragment, fIdx) => ( + + )) + ) : ( +
    + No code fragments for this comment. +
    + )} +
    +
    + ); +} + +interface DiffFragmentCardProps { + file: string; + startLine: number; + endLine: number; + message: string; + parsedDiffs: ParsedFileDiff[]; + fileContent?: string; + isLoading?: boolean; + hasDiff: boolean; + diffViewMode: DiffModeEnum; +} + +function DiffFragmentCard({ + file, + startLine, + endLine, + message, + parsedDiffs, + fileContent, + isLoading, + hasDiff, + diffViewMode, +}: DiffFragmentCardProps) { + const fileDiff = useMemo( + () => getFileDiff(parsedDiffs, file), + [parsedDiffs, file], + ); + const lang = getHighlightLanguageFromPath(file); + + const diffData = useMemo(() => { + if (!fileDiff) return null; + + if (!fileContent) return null; + + const diffString = synthesizeFragmentDiff( + fileDiff, + fileContent, + startLine, + endLine, + 3, + ); + + if (!diffString) return null; + + return { + hasChanges: diffHasChanges(diffString), + hunks: [diffString], + oldFile: { fileName: file, fileLang: lang }, + newFile: { fileName: file, fileLang: lang }, + }; + }, [fileDiff, file, lang, startLine, endLine, fileContent]); + + if (!hasDiff || !fileDiff) { + return ( +
    +
    + {file} + + Lines {startLine} + {endLine !== startLine && `–${endLine}`} + +
    + {message && ( +
    + + + + {message} +
    + )} + {isLoading ? ( +
    +
    + Loading... +
    + ) : ( +
    + No diff available for this file. +
    + )} +
    + ); + } + + return ( +
    +
    +
    + {file} +
    +
    + {message && ( +
    + + + +
    + )} +
    + {diffData ? ( + diffData.hasChanges ? ( +
    + +
    + ) : fileContent ? ( + + ) : ( +
    + No changes in this fragment range. +
    + ) + ) : isLoading ? ( +
    +
    + + Loading file content... + +
    + ) : ( +
    + No diff hunks match this fragment range. +
    + )} +
    +
    + ); +} diff --git a/remote-frontend/src/pkce.ts b/remote-frontend/src/pkce.ts index bf40a28e..556e5f39 100644 --- a/remote-frontend/src/pkce.ts +++ b/remote-frontend/src/pkce.ts @@ -1,55 +1,52 @@ export function generateVerifier(): string { - const array = new Uint8Array(32) - crypto.getRandomValues(array) - return base64UrlEncode(array) + const array = new Uint8Array(32); + crypto.getRandomValues(array); + return base64UrlEncode(array); } export async function generateChallenge(verifier: string): Promise { - const encoder = new TextEncoder() - const data = encoder.encode(verifier) - const hash = await crypto.subtle.digest('SHA-256', data) - return bytesToHex(new Uint8Array(hash)) + const encoder = new TextEncoder(); + const data = encoder.encode(verifier); + const hash = await crypto.subtle.digest("SHA-256", data); + return bytesToHex(new Uint8Array(hash)); } function base64UrlEncode(array: Uint8Array): string { - const base64 = btoa(String.fromCharCode(...array)) - return base64 - .replace(/\+/g, '-') - .replace(/\//g, '_') - .replace(/=/g, '') + const base64 = btoa(String.fromCharCode(...array)); + return base64.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, ""); } function bytesToHex(bytes: Uint8Array): string { - let out = '' + let out = ""; for (let i = 0; i < bytes.length; i++) { - out += bytes[i].toString(16).padStart(2, '0') + out += bytes[i].toString(16).padStart(2, "0"); } - return out + return out; } -const VERIFIER_KEY = 'oauth_verifier' -const TOKEN_KEY = 'invitation_token' +const VERIFIER_KEY = "oauth_verifier"; +const TOKEN_KEY = "invitation_token"; export function storeVerifier(verifier: string): void { - sessionStorage.setItem(VERIFIER_KEY, verifier) + sessionStorage.setItem(VERIFIER_KEY, verifier); } export function retrieveVerifier(): string | null { - return sessionStorage.getItem(VERIFIER_KEY) + return sessionStorage.getItem(VERIFIER_KEY); } export function clearVerifier(): void { - sessionStorage.removeItem(VERIFIER_KEY) + 
sessionStorage.removeItem(VERIFIER_KEY); } export function storeInvitationToken(token: string): void { - sessionStorage.setItem(TOKEN_KEY, token) + sessionStorage.setItem(TOKEN_KEY, token); } export function retrieveInvitationToken(): string | null { - return sessionStorage.getItem(TOKEN_KEY) + return sessionStorage.getItem(TOKEN_KEY); } export function clearInvitationToken(): void { - sessionStorage.removeItem(TOKEN_KEY) + sessionStorage.removeItem(TOKEN_KEY); } diff --git a/remote-frontend/src/styles/diff-overrides.css b/remote-frontend/src/styles/diff-overrides.css new file mode 100644 index 00000000..e42f4524 --- /dev/null +++ b/remote-frontend/src/styles/diff-overrides.css @@ -0,0 +1,170 @@ +/* Diff view color overrides for dark theme - grey theme instead of GitHub blue */ + +/* Override the library's CSS variables with higher specificity */ +.diff-view-container + .diff-tailwindcss-wrapper[data-theme="dark"] + .diff-style-root, +.diff-view-container .diff-style-root { + --diff-add-content--: rgba(46, 160, 67, 0.15) !important; + --diff-del-content--: rgba(248, 81, 73, 0.15) !important; + --diff-add-content-highlight--: rgba(46, 160, 67, 0.4) !important; + --diff-del-content-highlight--: rgba(248, 81, 73, 0.4) !important; + --diff-add-lineNumber--: rgb(25, 61, 33) !important; + --diff-del-lineNumber--: rgb(88, 42, 39) !important; + --diff-plain-content--: #1e1e1e !important; + --diff-plain-lineNumber--: #1e1e1e !important; + --diff-plain-lineNumber-color--: #6e7681 !important; + --diff-hunk-content--: #2a2a2a !important; + --diff-hunk-lineNumber--: #2a2a2a !important; + --diff-hunk-content-color--: #8b949e !important; + --diff-empty-content--: #1e1e1e !important; + --diff-expand-content--: #1e1e1e !important; + --diff-expand-lineNumber--: #1e1e1e !important; + --diff-expand-lineNumber-color--: #6e7681 !important; + --diff-border--: #292929 !important; +} + +/* Override the library's dark theme background */ +.diff-view-container .diff-style-root, 
+.diff-view-container [data-theme="dark"] .diff-style-root { + background: #1e1e1e !important; +} + +/* Override syntax highlighting background */ +.diff-view-container .diff-line-syntax-raw .hljs, +.diff-view-container [data-theme="dark"] .diff-line-syntax-raw .hljs { + background: transparent !important; +} + +/* Use JetBrains Mono for code */ +.diff-view-container .diff-line-content-raw, +.diff-view-container .diff-line-syntax-raw, +.diff-view-container .diff-line-num, +.diff-view-container .diff-line-old-num, +.diff-view-container .diff-line-new-num, +.diff-view-container td { + font-family: "JetBrains Mono", "Fira Code", "Consolas", monospace; + font-size: 12px; + line-height: 1.5; +} + +/* Line number styling */ +.diff-view-container .diff-line-num, +.diff-view-container .diff-line-old-num, +.diff-view-container .diff-line-new-num { + color: #6e7681; + padding: 0 8px; + min-width: 40px; +} + +/* Hunk header styling */ +.diff-view-container .diff-line-hunk td, +.diff-view-container .diff-line.diff-line-hunk td { + color: var(--diff-hunk-content-color--); + font-style: italic; +} + +/* Syntax highlighting - GitHub dark theme */ +.diff-view-container .hljs { + color: #c9d1d9; + background: transparent; +} + +.diff-view-container .hljs-doctag, +.diff-view-container .hljs-keyword, +.diff-view-container .hljs-meta .hljs-keyword, +.diff-view-container .hljs-template-tag, +.diff-view-container .hljs-template-variable, +.diff-view-container .hljs-type, +.diff-view-container .hljs-variable.language_ { + color: #ff7b72; +} + +.diff-view-container .hljs-title, +.diff-view-container .hljs-title.class_, +.diff-view-container .hljs-title.class_.inherited__, +.diff-view-container .hljs-title.function_ { + color: #d2a8ff; +} + +.diff-view-container .hljs-attr, +.diff-view-container .hljs-attribute, +.diff-view-container .hljs-literal, +.diff-view-container .hljs-meta, +.diff-view-container .hljs-number, +.diff-view-container .hljs-operator, +.diff-view-container 
.hljs-variable, +.diff-view-container .hljs-selector-attr, +.diff-view-container .hljs-selector-class, +.diff-view-container .hljs-selector-id { + color: #79c0ff; +} + +.diff-view-container .hljs-regexp, +.diff-view-container .hljs-string, +.diff-view-container .hljs-meta .hljs-string { + color: #a5d6ff; +} + +.diff-view-container .hljs-built_in, +.diff-view-container .hljs-symbol { + color: #ffa657; +} + +.diff-view-container .hljs-comment, +.diff-view-container .hljs-code, +.diff-view-container .hljs-formula { + color: #8b949e; +} + +.diff-view-container .hljs-name, +.diff-view-container .hljs-quote, +.diff-view-container .hljs-selector-tag, +.diff-view-container .hljs-selector-pseudo { + color: #7ee787; +} + +/* Word-level diff highlights */ +.diff-view-container .diff-add-content-highlight { + background-color: var(--diff-add-content-highlight--); +} + +.diff-view-container .diff-del-content-highlight { + background-color: var(--diff-del-content-highlight--); +} + +/* Remove default table borders */ +.diff-view-container table { + border-collapse: collapse; + width: 100%; +} + +.diff-view-container td { + border: none; + padding: 0 10px; + vertical-align: top; +} + +/* Line content should use pre-wrap for proper whitespace */ +.diff-view-container .diff-line-content-raw, +.diff-view-container .diff-line-syntax-raw { + white-space: pre; +} + +/* +/- indicator styling */ +.diff-view-container .diff-line-add-sign, +.diff-view-container .diff-line-del-sign { + user-select: none; + width: 1em; + display: inline-block; +} + +/* Ensure proper scrolling */ +.diff-view-container { + overflow-x: auto; +} + +/* +.diff-view-container .diff-view-wrapper { + min-width: max-content; +} */ diff --git a/remote-frontend/src/types/review.ts b/remote-frontend/src/types/review.ts new file mode 100644 index 00000000..82cecf57 --- /dev/null +++ b/remote-frontend/src/types/review.ts @@ -0,0 +1,17 @@ +export interface CodeFragment { + file: string; + start_line: number; + end_line: 
number; + message: string; +} + +export interface ReviewComment { + comment: string; + fragments: CodeFragment[]; +} + +export interface ReviewResult { + summary: string; + comments: ReviewComment[]; + fileHashMap: Record; +} diff --git a/remote-frontend/src/vite-env.d.ts b/remote-frontend/src/vite-env.d.ts index 09cd723d..ccd9989d 100644 --- a/remote-frontend/src/vite-env.d.ts +++ b/remote-frontend/src/vite-env.d.ts @@ -1,10 +1,12 @@ /// interface ImportMetaEnv { - readonly VITE_API_BASE_URL: string - readonly VITE_APP_BASE_URL: string + readonly VITE_API_BASE_URL: string; + readonly VITE_APP_BASE_URL: string; + readonly VITE_PUBLIC_POSTHOG_KEY: string; + readonly VITE_PUBLIC_POSTHOG_HOST: string; } interface ImportMeta { - readonly env: ImportMetaEnv + readonly env: ImportMetaEnv; } diff --git a/remote-frontend/tailwind.config.js b/remote-frontend/tailwind.config.js index dca8ba02..62250516 100644 --- a/remote-frontend/tailwind.config.js +++ b/remote-frontend/tailwind.config.js @@ -5,7 +5,51 @@ export default { "./src/**/*.{js,ts,jsx,tsx}", ], theme: { - extend: {}, + extend: { + fontSize: { + xs: ['0.625rem', { lineHeight: '0.875rem' }], + sm: ['0.75rem', { lineHeight: '1rem' }], + base: ['0.875rem', { lineHeight: '1.25rem' }], + lg: ['1rem', { lineHeight: '1.5rem' }], + xl: ['1.125rem', { lineHeight: '1.75rem' }], + }, + colors: { + border: "hsl(var(--border))", + input: "hsl(var(--input))", + ring: "hsl(var(--ring))", + background: "hsl(var(--background))", + foreground: "hsl(var(--foreground))", + primary: { + DEFAULT: "hsl(var(--primary))", + foreground: "hsl(var(--primary-foreground))", + }, + secondary: { + DEFAULT: "hsl(var(--secondary))", + foreground: "hsl(var(--secondary-foreground))", + }, + destructive: { + DEFAULT: "hsl(var(--destructive))", + foreground: "hsl(var(--destructive-foreground))", + }, + muted: { + DEFAULT: "hsl(var(--muted))", + foreground: "hsl(var(--muted-foreground))", + }, + accent: { + DEFAULT: "hsl(var(--accent))", + foreground: 
"hsl(var(--accent-foreground))", + }, + }, + borderRadius: { + lg: "var(--radius)", + md: "calc(var(--radius) - 2px)", + sm: "calc(var(--radius) - 4px)", + }, + fontFamily: { + 'mono': ['JetBrains Mono', 'Noto Emoji', 'monospace'], + 'sans': ['Inter', 'system-ui', 'sans-serif'], + }, + }, }, plugins: [], }