Remote review (#1521)
This commit is contained in:
committed by
GitHub
parent
5710cc3371
commit
fd9e5e5d79
40
.github/workflows/pre-release.yml
vendored
40
.github/workflows/pre-release.yml
vendored
@@ -234,6 +234,7 @@ jobs:
|
||||
run: |
|
||||
cargo zigbuild --release --target ${{ matrix.target }} -p server
|
||||
cargo zigbuild --release --target ${{ matrix.target }} --bin mcp_task_server
|
||||
cargo zigbuild --release --target ${{ matrix.target }} -p review
|
||||
env:
|
||||
POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }}
|
||||
POSTHOG_API_ENDPOINT: ${{ secrets.POSTHOG_API_ENDPOINT }}
|
||||
@@ -244,6 +245,7 @@ jobs:
|
||||
run: |
|
||||
cargo build --release --target ${{ matrix.target }} -p server
|
||||
cargo build --release --target ${{ matrix.target }} --bin mcp_task_server
|
||||
cargo build --release --target ${{ matrix.target }} -p review
|
||||
env:
|
||||
POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }}
|
||||
POSTHOG_API_ENDPOINT: ${{ secrets.POSTHOG_API_ENDPOINT }}
|
||||
@@ -268,9 +270,11 @@ jobs:
|
||||
if [[ "${{ matrix.os }}" == "windows-latest-l" ]]; then
|
||||
cp target/${{ matrix.target }}/release/server.exe dist/vibe-kanban-${{ matrix.name }}.exe
|
||||
cp target/${{ matrix.target }}/release/mcp_task_server.exe dist/vibe-kanban-mcp-${{ matrix.name }}.exe
|
||||
cp target/${{ matrix.target }}/release/review.exe dist/vibe-kanban-review-${{ matrix.name }}.exe
|
||||
else
|
||||
cp target/${{ matrix.target }}/release/server dist/vibe-kanban-${{ matrix.name }}
|
||||
cp target/${{ matrix.target }}/release/mcp_task_server dist/vibe-kanban-mcp-${{ matrix.name }}
|
||||
cp target/${{ matrix.target }}/release/review dist/vibe-kanban-review-${{ matrix.name }}
|
||||
fi
|
||||
|
||||
# Code signing for macOS only
|
||||
@@ -335,12 +339,38 @@ jobs:
|
||||
notarize: true
|
||||
app_store_connect_api_key_json_file: app_store_key.json
|
||||
|
||||
- name: Sign Review binary (macOS)
|
||||
if: runner.os == 'macOS'
|
||||
uses: indygreg/apple-code-sign-action@v1
|
||||
with:
|
||||
input_path: target/${{ matrix.target }}/release/review
|
||||
output_path: vibe-kanban-review
|
||||
p12_file: certificate.p12
|
||||
p12_password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
sign: true
|
||||
sign_args: "--code-signature-flags=runtime"
|
||||
|
||||
- name: Package Review binary (macOS)
|
||||
if: runner.os == 'macOS'
|
||||
run: zip vibe-kanban-review.zip vibe-kanban-review
|
||||
|
||||
- name: Notarize signed Review binary (macOS)
|
||||
if: runner.os == 'macOS'
|
||||
uses: indygreg/apple-code-sign-action@v1
|
||||
continue-on-error: true
|
||||
with:
|
||||
input_path: vibe-kanban-review.zip
|
||||
sign: false
|
||||
notarize: true
|
||||
app_store_connect_api_key_json_file: app_store_key.json
|
||||
|
||||
- name: Prepare signed binaries (macOS)
|
||||
if: runner.os == 'macOS'
|
||||
run: |
|
||||
mkdir -p dist
|
||||
cp vibe-kanban.zip dist/vibe-kanban-${{ matrix.name }}.zip
|
||||
cp vibe-kanban-mcp.zip dist/vibe-kanban-mcp-${{ matrix.name }}.zip
|
||||
cp vibe-kanban-review.zip dist/vibe-kanban-review-${{ matrix.name }}.zip
|
||||
|
||||
- name: Clean up certificates (macOS)
|
||||
if: runner.os == 'macOS'
|
||||
@@ -367,26 +397,32 @@ jobs:
|
||||
name: linux-x64
|
||||
binary: vibe-kanban
|
||||
mcp_binary: vibe-kanban-mcp
|
||||
review_binary: vibe-kanban-review
|
||||
- target: x86_64-pc-windows-msvc
|
||||
name: windows-x64
|
||||
binary: vibe-kanban.exe
|
||||
mcp_binary: vibe-kanban-mcp.exe
|
||||
review_binary: vibe-kanban-review.exe
|
||||
- target: x86_64-apple-darwin
|
||||
name: macos-x64
|
||||
binary: vibe-kanban
|
||||
mcp_binary: vibe-kanban-mcp
|
||||
review_binary: vibe-kanban-review
|
||||
- target: aarch64-apple-darwin
|
||||
name: macos-arm64
|
||||
binary: vibe-kanban
|
||||
mcp_binary: vibe-kanban-mcp
|
||||
review_binary: vibe-kanban-review
|
||||
- target: aarch64-pc-windows-msvc
|
||||
name: windows-arm64
|
||||
binary: vibe-kanban.exe
|
||||
mcp_binary: vibe-kanban-mcp.exe
|
||||
review_binary: vibe-kanban-review.exe
|
||||
- target: aarch64-unknown-linux-musl
|
||||
name: linux-arm64
|
||||
binary: vibe-kanban
|
||||
mcp_binary: vibe-kanban-mcp
|
||||
review_binary: vibe-kanban-review
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@@ -415,12 +451,15 @@ jobs:
|
||||
mkdir -p npx-cli/dist/${{ matrix.name }}
|
||||
mkdir vibe-kanban-${{ matrix.name }}
|
||||
mkdir vibe-kanban-mcp-${{ matrix.name }}
|
||||
mkdir vibe-kanban-review-${{ matrix.name }}
|
||||
|
||||
cp dist/vibe-kanban-${{ matrix.name }}* vibe-kanban-${{ matrix.name }}/${{ matrix.binary }}
|
||||
cp dist/vibe-kanban-mcp-${{ matrix.name }}* vibe-kanban-mcp-${{ matrix.name }}/${{ matrix.mcp_binary }}
|
||||
cp dist/vibe-kanban-review-${{ matrix.name }}* vibe-kanban-review-${{ matrix.name }}/${{ matrix.review_binary }}
|
||||
|
||||
zip -j npx-cli/dist/${{ matrix.name }}/vibe-kanban.zip vibe-kanban-${{ matrix.name }}/${{ matrix.binary }}
|
||||
zip -j npx-cli/dist/${{ matrix.name }}/vibe-kanban-mcp.zip vibe-kanban-mcp-${{ matrix.name }}/${{ matrix.mcp_binary }}
|
||||
zip -j npx-cli/dist/${{ matrix.name }}/vibe-kanban-review.zip vibe-kanban-review-${{ matrix.name }}/${{ matrix.review_binary }}
|
||||
|
||||
- name: Create platform package (macOS)
|
||||
if: matrix.name == 'macos-arm64' || matrix.name == 'macos-x64'
|
||||
@@ -429,6 +468,7 @@ jobs:
|
||||
mkdir vibe-kanban-${{ matrix.name }}
|
||||
cp dist/vibe-kanban-${{ matrix.name }}* npx-cli/dist/${{ matrix.name }}/vibe-kanban.zip
|
||||
cp dist/vibe-kanban-mcp-${{ matrix.name }}* npx-cli/dist/${{ matrix.name }}/vibe-kanban-mcp.zip
|
||||
cp dist/vibe-kanban-review-${{ matrix.name }}* npx-cli/dist/${{ matrix.name }}/vibe-kanban-review.zip
|
||||
|
||||
- name: Upload platform package artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -20,6 +20,7 @@ yarn-error.log*
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.production
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
|
||||
806
Cargo.lock
generated
806
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -8,7 +8,8 @@ members = [
|
||||
"crates/utils",
|
||||
"crates/local-deployment",
|
||||
"crates/deployment",
|
||||
"crates/remote"
|
||||
"crates/remote",
|
||||
"crates/review"
|
||||
]
|
||||
|
||||
[workspace.dependencies]
|
||||
|
||||
@@ -16,6 +16,12 @@ ENV RUSTFLAGS="-C target-feature=-crt-static"
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
||||
ENV PATH="/root/.cargo/bin:${PATH}"
|
||||
|
||||
ARG POSTHOG_API_KEY
|
||||
ARG POSTHOG_API_ENDPOINT
|
||||
|
||||
ENV VITE_PUBLIC_POSTHOG_KEY=$POSTHOG_API_KEY
|
||||
ENV VITE_PUBLIC_POSTHOG_HOST=$POSTHOG_API_ENDPOINT
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
|
||||
52
crates/remote/.sqlx/query-00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95.json
generated
Normal file
52
crates/remote/.sqlx/query-00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95.json
generated
Normal file
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT\n id,\n organization_id,\n user_id,\n state_token,\n expires_at,\n created_at\n FROM github_app_pending_installations\n WHERE state_token = $1 AND expires_at > NOW()\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "organization_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "user_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "state_token",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "expires_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "00f50fdb65f4126b197b523f6fc1870571c4c121c32e0c3393f6770fc3608e95"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04.json
generated
Normal file
14
crates/remote/.sqlx/query-18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n UPDATE reviews\n SET status = 'failed'\n WHERE id = $1 AND deleted_at IS NULL\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "18ae849cdeff678538d5bd6782e16780da9db40e4d892a75d7d244f247db5c04"
|
||||
}
|
||||
76
crates/remote/.sqlx/query-40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c.json
generated
Normal file
76
crates/remote/.sqlx/query-40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c.json
generated
Normal file
@@ -0,0 +1,76 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n FROM github_app_installations\n WHERE github_account_login = $1\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "organization_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "github_account_login",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "github_account_type",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "repository_selection",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "installed_by_user_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "suspended_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "updated_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "40c9618c70aae933513bd931a3baace6830d78daacfcbd7af69e4f76a234d01c"
|
||||
}
|
||||
119
crates/remote/.sqlx/query-4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204.json
generated
Normal file
119
crates/remote/.sqlx/query-4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204.json
generated
Normal file
@@ -0,0 +1,119 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n INSERT INTO reviews (id, gh_pr_url, r2_path, pr_title, github_installation_id, pr_owner, pr_repo, pr_number)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n RETURNING\n id,\n gh_pr_url,\n claude_code_session_id,\n ip_address AS \"ip_address: IpNetwork\",\n review_cache,\n last_viewed_at,\n r2_path,\n deleted_at,\n created_at,\n email,\n pr_title,\n status,\n github_installation_id,\n pr_owner,\n pr_repo,\n pr_number\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "gh_pr_url",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "claude_code_session_id",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "ip_address: IpNetwork",
|
||||
"type_info": "Inet"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "review_cache",
|
||||
"type_info": "Jsonb"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "last_viewed_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "r2_path",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "deleted_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "email",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 10,
|
||||
"name": "pr_title",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 11,
|
||||
"name": "status",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 12,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 13,
|
||||
"name": "pr_owner",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 14,
|
||||
"name": "pr_repo",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 15,
|
||||
"name": "pr_number",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Text",
|
||||
"Text",
|
||||
"Text",
|
||||
"Int8",
|
||||
"Text",
|
||||
"Text",
|
||||
"Int4"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "4447c24a9150eb78d81edc26a441a50ee50b8523c92bfe3ccc82b09518608204"
|
||||
}
|
||||
23
crates/remote/.sqlx/query-4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json
generated
Normal file
23
crates/remote/.sqlx/query-4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5.json
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT COUNT(*) as \"count!\"\n FROM reviews\n WHERE ip_address = $1\n AND created_at > $2\n AND deleted_at IS NULL\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "count!",
|
||||
"type_info": "Int8"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Inet",
|
||||
"Timestamptz"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
null
|
||||
]
|
||||
},
|
||||
"hash": "4508b7a46677e8da7a397979a22c1a3e1160c7407b94d7baa84d6a3cdc5667c5"
|
||||
}
|
||||
76
crates/remote/.sqlx/query-471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844.json
generated
Normal file
76
crates/remote/.sqlx/query-471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844.json
generated
Normal file
@@ -0,0 +1,76 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n FROM github_app_installations\n WHERE github_installation_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "organization_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "github_account_login",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "github_account_type",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "repository_selection",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "installed_by_user_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "suspended_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "updated_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "471944787bb9b58a1b30628f28ab8088f60bf3390bfaddbae993e87df89b8844"
|
||||
}
|
||||
12
crates/remote/.sqlx/query-55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe.json
generated
Normal file
12
crates/remote/.sqlx/query-55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe.json
generated
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_pending_installations\n WHERE expires_at < NOW()\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": []
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "55f054b37280bfa43dbea79edd61ba969bacf776c0be43b608b5b0ca3f68c1fe"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json
generated
Normal file
14
crates/remote/.sqlx/query-574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n UPDATE github_app_installations\n SET suspended_at = NOW(), updated_at = NOW()\n WHERE github_installation_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "574f50459071d9a400bad0c7623ab1618c6ae90b4a60adb8cb4a75628cb22c1c"
|
||||
}
|
||||
16
crates/remote/.sqlx/query-5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d.json
generated
Normal file
16
crates/remote/.sqlx/query-5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d.json
generated
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n INSERT INTO github_app_repositories (installation_id, github_repo_id, repo_full_name)\n VALUES ($1, $2, $3)\n ON CONFLICT (installation_id, github_repo_id) DO UPDATE SET\n repo_full_name = EXCLUDED.repo_full_name\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Int8",
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "5ce478f8221034468e5ea9ec66051e724d7054f8c62106795bccf9fd5366696d"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119.json
generated
Normal file
14
crates/remote/.sqlx/query-6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_pending_installations\n WHERE organization_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "6205d4d925ce5c7ab8a91e109c807b458a668304ed6262c5afab4b85a227d119"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791.json
generated
Normal file
14
crates/remote/.sqlx/query-79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n UPDATE reviews\n SET status = 'completed'\n WHERE id = $1 AND deleted_at IS NULL\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "79dc2aa6cb26c21530ac05b84ec58aff9b042724bda846aadd9bf1b1a3a53791"
|
||||
}
|
||||
118
crates/remote/.sqlx/query-8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8.json
generated
Normal file
118
crates/remote/.sqlx/query-8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8.json
generated
Normal file
@@ -0,0 +1,118 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n INSERT INTO reviews (id, gh_pr_url, claude_code_session_id, ip_address, r2_path, email, pr_title)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n RETURNING\n id,\n gh_pr_url,\n claude_code_session_id,\n ip_address AS \"ip_address: IpNetwork\",\n review_cache,\n last_viewed_at,\n r2_path,\n deleted_at,\n created_at,\n email,\n pr_title,\n status,\n github_installation_id,\n pr_owner,\n pr_repo,\n pr_number\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "gh_pr_url",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "claude_code_session_id",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "ip_address: IpNetwork",
|
||||
"type_info": "Inet"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "review_cache",
|
||||
"type_info": "Jsonb"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "last_viewed_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "r2_path",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "deleted_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "email",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 10,
|
||||
"name": "pr_title",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 11,
|
||||
"name": "status",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 12,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 13,
|
||||
"name": "pr_owner",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 14,
|
||||
"name": "pr_repo",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 15,
|
||||
"name": "pr_number",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Text",
|
||||
"Text",
|
||||
"Inet",
|
||||
"Text",
|
||||
"Text",
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "8fc5f7e1920e9d43034aeaacb0a00739e0ee3cd00d06a692beac0f0fb2324ac8"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4.json
generated
Normal file
14
crates/remote/.sqlx/query-9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_pending_installations\n WHERE state_token = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "9889a5e2b2b849138e5af7bb649c9833cfa4fbc45c3bed269d25a8ada30634e4"
|
||||
}
|
||||
46
crates/remote/.sqlx/query-9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b.json
generated
Normal file
46
crates/remote/.sqlx/query-9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b.json
generated
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT\n id,\n installation_id,\n github_repo_id,\n repo_full_name,\n created_at\n FROM github_app_repositories\n WHERE installation_id = $1\n ORDER BY repo_full_name\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "installation_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "github_repo_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "repo_full_name",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "9c77a2c9fafd0e5418eb1c67dc8f276910980a22c0eeb17a8f4028d07d36515b"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6.json
generated
Normal file
14
crates/remote/.sqlx/query-a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_repositories\n WHERE installation_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "a2b8b2f8147c3f5e717a2b361398b1d7af1be7d48c1c943bc93e80f30da6f6d6"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f.json
generated
Normal file
14
crates/remote/.sqlx/query-bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_installations\n WHERE organization_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "bd632f11a197d6a17fcdf3e757283a64d281a931aaacd1ed6e4b73f18f1b6a2f"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f.json
generated
Normal file
14
crates/remote/.sqlx/query-c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n UPDATE github_app_installations\n SET suspended_at = NULL, updated_at = NOW()\n WHERE github_installation_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "c6cccc00461c95d86edc5a1f66b8228fb438985f6b78f9d83663ecb11d59675f"
|
||||
}
|
||||
112
crates/remote/.sqlx/query-da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b.json
generated
Normal file
112
crates/remote/.sqlx/query-da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b.json
generated
Normal file
@@ -0,0 +1,112 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT\n id,\n gh_pr_url,\n claude_code_session_id,\n ip_address AS \"ip_address: IpNetwork\",\n review_cache,\n last_viewed_at,\n r2_path,\n deleted_at,\n created_at,\n email,\n pr_title,\n status,\n github_installation_id,\n pr_owner,\n pr_repo,\n pr_number\n FROM reviews\n WHERE id = $1 AND deleted_at IS NULL\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "gh_pr_url",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "claude_code_session_id",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "ip_address: IpNetwork",
|
||||
"type_info": "Inet"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "review_cache",
|
||||
"type_info": "Jsonb"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "last_viewed_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "r2_path",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "deleted_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "email",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 10,
|
||||
"name": "pr_title",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 11,
|
||||
"name": "status",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 12,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 13,
|
||||
"name": "pr_owner",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 14,
|
||||
"name": "pr_repo",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 15,
|
||||
"name": "pr_number",
|
||||
"type_info": "Int4"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true
|
||||
]
|
||||
},
|
||||
"hash": "da660b40d95d5fa5e9176b0b5859bb594e83fc21664f062f29ed148969b17c0b"
|
||||
}
|
||||
14
crates/remote/.sqlx/query-df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a.json
generated
Normal file
14
crates/remote/.sqlx/query-df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a.json
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_installations\n WHERE github_installation_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "df27dcabe19b0b1433865256b090f84474986985ec0d204ab17becd6d3568d0a"
|
||||
}
|
||||
55
crates/remote/.sqlx/query-e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c.json
generated
Normal file
55
crates/remote/.sqlx/query-e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c.json
generated
Normal file
@@ -0,0 +1,55 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n INSERT INTO github_app_pending_installations (organization_id, user_id, state_token, expires_at)\n VALUES ($1, $2, $3, $4)\n RETURNING\n id,\n organization_id,\n user_id,\n state_token,\n expires_at,\n created_at\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "organization_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "user_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "state_token",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "expires_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Uuid",
|
||||
"Text",
|
||||
"Timestamptz"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "e553f31a70abb9d7e39755633f67f2b9c21ab6552986181acc10a1523852655c"
|
||||
}
|
||||
15
crates/remote/.sqlx/query-ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986.json
generated
Normal file
15
crates/remote/.sqlx/query-ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986.json
generated
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n UPDATE github_app_installations\n SET repository_selection = $2, updated_at = NOW()\n WHERE github_installation_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8",
|
||||
"Text"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "ea41e984b0e7c1c952cb265659a443de1967c2d024be80ae1d9878e27b474986"
|
||||
}
|
||||
15
crates/remote/.sqlx/query-f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4.json
generated
Normal file
15
crates/remote/.sqlx/query-f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4.json
generated
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n DELETE FROM github_app_repositories\n WHERE installation_id = $1 AND github_repo_id = ANY($2)\n ",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Int8Array"
|
||||
]
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "f00d3b1e7ce2a7fe5e8e3132e32b7ea50b0d6865f0708b6113bea68a54d857f4"
|
||||
}
|
||||
76
crates/remote/.sqlx/query-f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2.json
generated
Normal file
76
crates/remote/.sqlx/query-f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2.json
generated
Normal file
@@ -0,0 +1,76 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n FROM github_app_installations\n WHERE organization_id = $1\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "organization_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "github_account_login",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "github_account_type",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "repository_selection",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "installed_by_user_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "suspended_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "updated_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "f360cdb953a3e2fb64123ab8351d42029b58919a0ac0e8900320fee60c5c93b2"
|
||||
}
|
||||
81
crates/remote/.sqlx/query-fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302.json
generated
Normal file
81
crates/remote/.sqlx/query-fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302.json
generated
Normal file
@@ -0,0 +1,81 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n INSERT INTO github_app_installations (\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (github_installation_id) DO UPDATE SET\n organization_id = EXCLUDED.organization_id,\n github_account_login = EXCLUDED.github_account_login,\n github_account_type = EXCLUDED.github_account_type,\n repository_selection = EXCLUDED.repository_selection,\n installed_by_user_id = EXCLUDED.installed_by_user_id,\n suspended_at = NULL,\n updated_at = NOW()\n RETURNING\n id,\n organization_id,\n github_installation_id,\n github_account_login,\n github_account_type,\n repository_selection,\n installed_by_user_id,\n suspended_at,\n created_at,\n updated_at\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "organization_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "github_installation_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 3,
|
||||
"name": "github_account_login",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 4,
|
||||
"name": "github_account_type",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 5,
|
||||
"name": "repository_selection",
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"ordinal": 6,
|
||||
"name": "installed_by_user_id",
|
||||
"type_info": "Uuid"
|
||||
},
|
||||
{
|
||||
"ordinal": 7,
|
||||
"name": "suspended_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 8,
|
||||
"name": "created_at",
|
||||
"type_info": "Timestamptz"
|
||||
},
|
||||
{
|
||||
"ordinal": 9,
|
||||
"name": "updated_at",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Uuid",
|
||||
"Int8",
|
||||
"Text",
|
||||
"Text",
|
||||
"Text",
|
||||
"Uuid"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "fcffbcc41e058a6d055bec006e7287fcfb26b609107d753e372faeb7f9d92302"
|
||||
}
|
||||
@@ -18,7 +18,8 @@ sentry = { version = "0.41.0", features = ["anyhow", "backtrace", "panic", "debu
|
||||
sentry-tracing = { version = "0.41.0", features = ["backtrace"] }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
sqlx = { version = "0.8.6", default-features = false, features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json", "macros", "migrate"] }
|
||||
sqlx = { version = "0.8.6", default-features = false, features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "json", "macros", "migrate", "ipnetwork"] }
|
||||
ipnetwork = "0.20"
|
||||
tokio = { workspace = true }
|
||||
tokio-stream = { version = "0.1.17", features = ["sync"] }
|
||||
tower-http = { workspace = true }
|
||||
@@ -32,5 +33,15 @@ uuid = { version = "1", features = ["serde", "v4"] }
|
||||
jsonwebtoken = "9"
|
||||
rand = "0.9"
|
||||
sha2 = "0.10"
|
||||
hmac = "0.12"
|
||||
subtle = "2.5"
|
||||
hex = "0.4"
|
||||
urlencoding = "2.1"
|
||||
url = "2.5"
|
||||
base64 = "0.22"
|
||||
aws-config = { version = "1.5", default-features = false, features = ["behavior-version-latest", "rustls"] }
|
||||
aws-sdk-s3 = { version = "1.65", default-features = false, features = ["behavior-version-latest", "rustls"] }
|
||||
aws-credential-types = "1.2"
|
||||
tempfile = "3"
|
||||
tar = "0.4"
|
||||
flate2 = "1.0"
|
||||
|
||||
@@ -46,7 +46,7 @@ FROM debian:bookworm-slim AS runtime
|
||||
ARG APP_NAME
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates libssl3 wget \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates libssl3 wget git \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& useradd --system --create-home --uid 10001 appuser
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
services:
|
||||
remote-db:
|
||||
image: postgres:16-alpine
|
||||
command: ["postgres", "-c", "wal_level=logical"]
|
||||
command: [ "postgres", "-c", "wal_level=logical" ]
|
||||
environment:
|
||||
POSTGRES_DB: remote
|
||||
POSTGRES_USER: remote
|
||||
@@ -57,6 +57,15 @@ services:
|
||||
VITE_APP_BASE_URL: http://localhost:3000
|
||||
VITE_API_BASE_URL: http://localhost:3000
|
||||
ELECTRIC_ROLE_PASSWORD: ${ELECTRIC_ROLE_PASSWORD:?set in .env.remote}
|
||||
R2_ACCESS_KEY_ID: ${R2_ACCESS_KEY_ID:-}
|
||||
R2_SECRET_ACCESS_KEY: ${R2_SECRET_ACCESS_KEY:-}
|
||||
R2_REVIEW_ENDPOINT: ${R2_REVIEW_ENDPOINT:-}
|
||||
R2_REVIEW_BUCKET: ${R2_REVIEW_BUCKET:-}
|
||||
REVIEW_WORKER_BASE_URL: ${REVIEW_WORKER_BASE_URL:-}
|
||||
GITHUB_APP_ID: ${GITHUB_APP_ID:-}
|
||||
GITHUB_APP_PRIVATE_KEY: ${GITHUB_APP_PRIVATE_KEY:-}
|
||||
GITHUB_APP_WEBHOOK_SECRET: ${GITHUB_APP_WEBHOOK_SECRET:-}
|
||||
GITHUB_APP_SLUG: ${GITHUB_APP_SLUG:-}
|
||||
ports:
|
||||
- "127.0.0.1:3000:8081"
|
||||
restart: unless-stopped
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
CREATE TABLE IF NOT EXISTS reviews (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
gh_pr_url TEXT NOT NULL,
|
||||
claude_code_session_id TEXT,
|
||||
ip_address INET NOT NULL,
|
||||
review_cache JSONB,
|
||||
last_viewed_at TIMESTAMPTZ,
|
||||
r2_path TEXT NOT NULL,
|
||||
deleted_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
email TEXT NOT NULL,
|
||||
pr_title TEXT NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'pending'
|
||||
);
|
||||
|
||||
-- Index for rate limiting queries (IP + time range)
|
||||
CREATE INDEX IF NOT EXISTS idx_reviews_ip_created ON reviews (ip_address, created_at);
|
||||
@@ -0,0 +1,40 @@
|
||||
-- GitHub App installations linked to organizations
|
||||
CREATE TABLE github_app_installations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
organization_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
|
||||
github_installation_id BIGINT NOT NULL UNIQUE,
|
||||
github_account_login TEXT NOT NULL,
|
||||
github_account_type TEXT NOT NULL, -- 'Organization' or 'User'
|
||||
repository_selection TEXT NOT NULL, -- 'all' or 'selected'
|
||||
installed_by_user_id UUID REFERENCES users(id) ON DELETE SET NULL,
|
||||
suspended_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX idx_github_app_installations_org ON github_app_installations(organization_id);
|
||||
|
||||
-- Repositories accessible via an installation
|
||||
CREATE TABLE github_app_repositories (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
installation_id UUID NOT NULL REFERENCES github_app_installations(id) ON DELETE CASCADE,
|
||||
github_repo_id BIGINT NOT NULL,
|
||||
repo_full_name TEXT NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
UNIQUE(installation_id, github_repo_id)
|
||||
);
|
||||
|
||||
CREATE INDEX idx_github_app_repos_installation ON github_app_repositories(installation_id);
|
||||
|
||||
-- Track pending installations (before callback completes)
|
||||
CREATE TABLE github_app_pending_installations (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
organization_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE,
|
||||
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
state_token TEXT NOT NULL UNIQUE,
|
||||
expires_at TIMESTAMPTZ NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX idx_pending_installations_state ON github_app_pending_installations(state_token);
|
||||
CREATE INDEX idx_pending_installations_expires ON github_app_pending_installations(expires_at);
|
||||
@@ -0,0 +1,15 @@
|
||||
-- Make email and ip_address nullable for webhook-triggered reviews
|
||||
ALTER TABLE reviews
|
||||
ALTER COLUMN email DROP NOT NULL,
|
||||
ALTER COLUMN ip_address DROP NOT NULL;
|
||||
|
||||
-- Add webhook-specific columns
|
||||
ALTER TABLE reviews
|
||||
ADD COLUMN github_installation_id BIGINT,
|
||||
ADD COLUMN pr_owner TEXT,
|
||||
ADD COLUMN pr_repo TEXT,
|
||||
ADD COLUMN pr_number INTEGER;
|
||||
|
||||
-- Index for webhook reviews
|
||||
CREATE INDEX idx_reviews_webhook ON reviews (github_installation_id)
|
||||
WHERE github_installation_id IS NOT NULL;
|
||||
@@ -12,7 +12,9 @@ use crate::{
|
||||
},
|
||||
config::RemoteServerConfig,
|
||||
db,
|
||||
github_app::GitHubAppService,
|
||||
mail::LoopsMailer,
|
||||
r2::R2Service,
|
||||
routes,
|
||||
};
|
||||
|
||||
@@ -84,7 +86,44 @@ impl Server {
|
||||
)
|
||||
})?;
|
||||
|
||||
let http_client = reqwest::Client::new();
|
||||
let r2 = config.r2.as_ref().map(R2Service::new);
|
||||
if r2.is_some() {
|
||||
tracing::info!("R2 storage service initialized");
|
||||
} else {
|
||||
tracing::warn!(
|
||||
"R2 storage service not configured. Set R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY, R2_REVIEW_ENDPOINT, and R2_REVIEW_BUCKET to enable."
|
||||
);
|
||||
}
|
||||
|
||||
let http_client = reqwest::Client::builder()
|
||||
.user_agent("VibeKanbanRemote/1.0")
|
||||
.build()
|
||||
.context("failed to create HTTP client")?;
|
||||
|
||||
let github_app = match &config.github_app {
|
||||
Some(github_config) => {
|
||||
match GitHubAppService::new(github_config, http_client.clone()) {
|
||||
Ok(service) => {
|
||||
tracing::info!(
|
||||
app_slug = %github_config.app_slug,
|
||||
"GitHub App service initialized"
|
||||
);
|
||||
Some(Arc::new(service))
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!(?e, "Failed to initialize GitHub App service");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
tracing::info!(
|
||||
"GitHub App not configured. Set GITHUB_APP_ID, GITHUB_APP_PRIVATE_KEY, GITHUB_APP_WEBHOOK_SECRET, and GITHUB_APP_SLUG to enable."
|
||||
);
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let state = AppState::new(
|
||||
pool.clone(),
|
||||
config.clone(),
|
||||
@@ -94,6 +133,8 @@ impl Server {
|
||||
mailer,
|
||||
server_public_base_url,
|
||||
http_client,
|
||||
r2,
|
||||
github_app,
|
||||
);
|
||||
|
||||
let router = routes::router(state);
|
||||
|
||||
@@ -13,6 +13,105 @@ pub struct RemoteServerConfig {
|
||||
pub electric_url: String,
|
||||
pub electric_secret: Option<SecretString>,
|
||||
pub electric_role_password: Option<SecretString>,
|
||||
pub r2: Option<R2Config>,
|
||||
pub review_worker_base_url: Option<String>,
|
||||
pub github_app: Option<GitHubAppConfig>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct R2Config {
|
||||
pub access_key_id: String,
|
||||
pub secret_access_key: SecretString,
|
||||
pub endpoint: String,
|
||||
pub bucket: String,
|
||||
pub presign_expiry_secs: u64,
|
||||
}
|
||||
|
||||
impl R2Config {
|
||||
pub fn from_env() -> Result<Option<Self>, ConfigError> {
|
||||
let access_key_id = match env::var("R2_ACCESS_KEY_ID") {
|
||||
Ok(v) => v,
|
||||
Err(_) => {
|
||||
tracing::info!("R2_ACCESS_KEY_ID not set, R2 storage disabled");
|
||||
return Ok(None);
|
||||
}
|
||||
};
|
||||
|
||||
tracing::info!("R2_ACCESS_KEY_ID is set, checking other R2 env vars");
|
||||
|
||||
let secret_access_key = env::var("R2_SECRET_ACCESS_KEY")
|
||||
.map_err(|_| ConfigError::MissingVar("R2_SECRET_ACCESS_KEY"))?;
|
||||
|
||||
let endpoint = env::var("R2_REVIEW_ENDPOINT")
|
||||
.map_err(|_| ConfigError::MissingVar("R2_REVIEW_ENDPOINT"))?;
|
||||
|
||||
let bucket = env::var("R2_REVIEW_BUCKET")
|
||||
.map_err(|_| ConfigError::MissingVar("R2_REVIEW_BUCKET"))?;
|
||||
|
||||
let presign_expiry_secs = env::var("R2_PRESIGN_EXPIRY_SECS")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(3600);
|
||||
|
||||
tracing::info!(endpoint = %endpoint, bucket = %bucket, "R2 config loaded successfully");
|
||||
|
||||
Ok(Some(Self {
|
||||
access_key_id,
|
||||
secret_access_key: SecretString::new(secret_access_key.into()),
|
||||
endpoint,
|
||||
bucket,
|
||||
presign_expiry_secs,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct GitHubAppConfig {
|
||||
pub app_id: u64,
|
||||
pub private_key: SecretString, // Base64-encoded PEM
|
||||
pub webhook_secret: SecretString,
|
||||
pub app_slug: String,
|
||||
}
|
||||
|
||||
impl GitHubAppConfig {
|
||||
pub fn from_env() -> Result<Option<Self>, ConfigError> {
|
||||
let app_id = match env::var("GITHUB_APP_ID") {
|
||||
Ok(v) => v,
|
||||
Err(_) => {
|
||||
tracing::info!("GITHUB_APP_ID not set, GitHub App integration disabled");
|
||||
return Ok(None);
|
||||
}
|
||||
};
|
||||
|
||||
let app_id: u64 = app_id
|
||||
.parse()
|
||||
.map_err(|_| ConfigError::InvalidVar("GITHUB_APP_ID"))?;
|
||||
|
||||
tracing::info!("GITHUB_APP_ID is set, checking other GitHub App env vars");
|
||||
|
||||
let private_key = env::var("GITHUB_APP_PRIVATE_KEY")
|
||||
.map_err(|_| ConfigError::MissingVar("GITHUB_APP_PRIVATE_KEY"))?;
|
||||
|
||||
// Validate that the private key is valid base64
|
||||
BASE64_STANDARD
|
||||
.decode(private_key.as_bytes())
|
||||
.map_err(|_| ConfigError::InvalidVar("GITHUB_APP_PRIVATE_KEY"))?;
|
||||
|
||||
let webhook_secret = env::var("GITHUB_APP_WEBHOOK_SECRET")
|
||||
.map_err(|_| ConfigError::MissingVar("GITHUB_APP_WEBHOOK_SECRET"))?;
|
||||
|
||||
let app_slug =
|
||||
env::var("GITHUB_APP_SLUG").map_err(|_| ConfigError::MissingVar("GITHUB_APP_SLUG"))?;
|
||||
|
||||
tracing::info!(app_id = %app_id, app_slug = %app_slug, "GitHub App config loaded successfully");
|
||||
|
||||
Ok(Some(Self {
|
||||
app_id,
|
||||
private_key: SecretString::new(private_key.into()),
|
||||
webhook_secret: SecretString::new(webhook_secret.into()),
|
||||
app_slug,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
@@ -49,6 +148,12 @@ impl RemoteServerConfig {
|
||||
.ok()
|
||||
.map(|s| SecretString::new(s.into()));
|
||||
|
||||
let r2 = R2Config::from_env()?;
|
||||
|
||||
let review_worker_base_url = env::var("REVIEW_WORKER_BASE_URL").ok();
|
||||
|
||||
let github_app = GitHubAppConfig::from_env()?;
|
||||
|
||||
Ok(Self {
|
||||
database_url,
|
||||
listen_addr,
|
||||
@@ -57,6 +162,9 @@ impl RemoteServerConfig {
|
||||
electric_url,
|
||||
electric_secret,
|
||||
electric_role_password,
|
||||
r2,
|
||||
review_worker_base_url,
|
||||
github_app,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
490
crates/remote/src/db/github_app.rs
Normal file
490
crates/remote/src/db/github_app.rs
Normal file
@@ -0,0 +1,490 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use sqlx::{FromRow, PgPool};
|
||||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum GitHubAppDbError {
|
||||
#[error("database error: {0}")]
|
||||
Database(#[from] sqlx::Error),
|
||||
#[error("installation not found")]
|
||||
NotFound,
|
||||
#[error("pending installation not found or expired")]
|
||||
PendingNotFound,
|
||||
}
|
||||
|
||||
/// A GitHub App installation linked to an organization
|
||||
#[derive(Debug, Clone, FromRow)]
|
||||
pub struct GitHubAppInstallation {
|
||||
pub id: Uuid,
|
||||
pub organization_id: Uuid,
|
||||
pub github_installation_id: i64,
|
||||
pub github_account_login: String,
|
||||
pub github_account_type: String,
|
||||
pub repository_selection: String,
|
||||
pub installed_by_user_id: Option<Uuid>,
|
||||
pub suspended_at: Option<DateTime<Utc>>,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// A repository accessible via an installation
|
||||
#[derive(Debug, Clone, FromRow)]
|
||||
pub struct GitHubAppRepository {
|
||||
pub id: Uuid,
|
||||
pub installation_id: Uuid,
|
||||
pub github_repo_id: i64,
|
||||
pub repo_full_name: String,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// A pending installation waiting for callback
|
||||
#[derive(Debug, Clone, FromRow)]
|
||||
pub struct PendingInstallation {
|
||||
pub id: Uuid,
|
||||
pub organization_id: Uuid,
|
||||
pub user_id: Uuid,
|
||||
pub state_token: String,
|
||||
pub expires_at: DateTime<Utc>,
|
||||
pub created_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
pub struct GitHubAppRepository2<'a> {
|
||||
pool: &'a PgPool,
|
||||
}
|
||||
|
||||
impl<'a> GitHubAppRepository2<'a> {
|
||||
pub fn new(pool: &'a PgPool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
// ========== Installations ==========
|
||||
|
||||
pub async fn create_installation(
|
||||
&self,
|
||||
organization_id: Uuid,
|
||||
github_installation_id: i64,
|
||||
github_account_login: &str,
|
||||
github_account_type: &str,
|
||||
repository_selection: &str,
|
||||
installed_by_user_id: Uuid,
|
||||
) -> Result<GitHubAppInstallation, GitHubAppDbError> {
|
||||
let installation = sqlx::query_as!(
|
||||
GitHubAppInstallation,
|
||||
r#"
|
||||
INSERT INTO github_app_installations (
|
||||
organization_id,
|
||||
github_installation_id,
|
||||
github_account_login,
|
||||
github_account_type,
|
||||
repository_selection,
|
||||
installed_by_user_id
|
||||
)
|
||||
VALUES ($1, $2, $3, $4, $5, $6)
|
||||
ON CONFLICT (github_installation_id) DO UPDATE SET
|
||||
organization_id = EXCLUDED.organization_id,
|
||||
github_account_login = EXCLUDED.github_account_login,
|
||||
github_account_type = EXCLUDED.github_account_type,
|
||||
repository_selection = EXCLUDED.repository_selection,
|
||||
installed_by_user_id = EXCLUDED.installed_by_user_id,
|
||||
suspended_at = NULL,
|
||||
updated_at = NOW()
|
||||
RETURNING
|
||||
id,
|
||||
organization_id,
|
||||
github_installation_id,
|
||||
github_account_login,
|
||||
github_account_type,
|
||||
repository_selection,
|
||||
installed_by_user_id,
|
||||
suspended_at,
|
||||
created_at,
|
||||
updated_at
|
||||
"#,
|
||||
organization_id,
|
||||
github_installation_id,
|
||||
github_account_login,
|
||||
github_account_type,
|
||||
repository_selection,
|
||||
installed_by_user_id
|
||||
)
|
||||
.fetch_one(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(installation)
|
||||
}
|
||||
|
||||
pub async fn get_by_github_id(
|
||||
&self,
|
||||
github_installation_id: i64,
|
||||
) -> Result<Option<GitHubAppInstallation>, GitHubAppDbError> {
|
||||
let installation = sqlx::query_as!(
|
||||
GitHubAppInstallation,
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
organization_id,
|
||||
github_installation_id,
|
||||
github_account_login,
|
||||
github_account_type,
|
||||
repository_selection,
|
||||
installed_by_user_id,
|
||||
suspended_at,
|
||||
created_at,
|
||||
updated_at
|
||||
FROM github_app_installations
|
||||
WHERE github_installation_id = $1
|
||||
"#,
|
||||
github_installation_id
|
||||
)
|
||||
.fetch_optional(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(installation)
|
||||
}
|
||||
|
||||
/// Find an installation by the GitHub account login (owner name)
|
||||
pub async fn get_by_account_login(
|
||||
&self,
|
||||
account_login: &str,
|
||||
) -> Result<Option<GitHubAppInstallation>, GitHubAppDbError> {
|
||||
let installation = sqlx::query_as!(
|
||||
GitHubAppInstallation,
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
organization_id,
|
||||
github_installation_id,
|
||||
github_account_login,
|
||||
github_account_type,
|
||||
repository_selection,
|
||||
installed_by_user_id,
|
||||
suspended_at,
|
||||
created_at,
|
||||
updated_at
|
||||
FROM github_app_installations
|
||||
WHERE github_account_login = $1
|
||||
"#,
|
||||
account_login
|
||||
)
|
||||
.fetch_optional(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(installation)
|
||||
}
|
||||
|
||||
pub async fn get_by_organization(
|
||||
&self,
|
||||
organization_id: Uuid,
|
||||
) -> Result<Option<GitHubAppInstallation>, GitHubAppDbError> {
|
||||
let installation = sqlx::query_as!(
|
||||
GitHubAppInstallation,
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
organization_id,
|
||||
github_installation_id,
|
||||
github_account_login,
|
||||
github_account_type,
|
||||
repository_selection,
|
||||
installed_by_user_id,
|
||||
suspended_at,
|
||||
created_at,
|
||||
updated_at
|
||||
FROM github_app_installations
|
||||
WHERE organization_id = $1
|
||||
"#,
|
||||
organization_id
|
||||
)
|
||||
.fetch_optional(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(installation)
|
||||
}
|
||||
|
||||
pub async fn delete_by_github_id(
|
||||
&self,
|
||||
github_installation_id: i64,
|
||||
) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_installations
|
||||
WHERE github_installation_id = $1
|
||||
"#,
|
||||
github_installation_id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_by_organization(
|
||||
&self,
|
||||
organization_id: Uuid,
|
||||
) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_installations
|
||||
WHERE organization_id = $1
|
||||
"#,
|
||||
organization_id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn suspend(&self, github_installation_id: i64) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE github_app_installations
|
||||
SET suspended_at = NOW(), updated_at = NOW()
|
||||
WHERE github_installation_id = $1
|
||||
"#,
|
||||
github_installation_id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn unsuspend(&self, github_installation_id: i64) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE github_app_installations
|
||||
SET suspended_at = NULL, updated_at = NOW()
|
||||
WHERE github_installation_id = $1
|
||||
"#,
|
||||
github_installation_id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_repository_selection(
|
||||
&self,
|
||||
github_installation_id: i64,
|
||||
repository_selection: &str,
|
||||
) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE github_app_installations
|
||||
SET repository_selection = $2, updated_at = NOW()
|
||||
WHERE github_installation_id = $1
|
||||
"#,
|
||||
github_installation_id,
|
||||
repository_selection
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ========== Repositories ==========
|
||||
|
||||
pub async fn sync_repositories(
|
||||
&self,
|
||||
installation_id: Uuid,
|
||||
repos: &[(i64, String)], // (github_repo_id, repo_full_name)
|
||||
) -> Result<(), GitHubAppDbError> {
|
||||
// Delete all existing repos for this installation
|
||||
sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_repositories
|
||||
WHERE installation_id = $1
|
||||
"#,
|
||||
installation_id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
// Insert new repos
|
||||
for (github_repo_id, repo_full_name) in repos {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO github_app_repositories (installation_id, github_repo_id, repo_full_name)
|
||||
VALUES ($1, $2, $3)
|
||||
ON CONFLICT (installation_id, github_repo_id) DO UPDATE SET
|
||||
repo_full_name = EXCLUDED.repo_full_name
|
||||
"#,
|
||||
installation_id,
|
||||
github_repo_id,
|
||||
repo_full_name
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_repositories(
|
||||
&self,
|
||||
installation_id: Uuid,
|
||||
) -> Result<Vec<GitHubAppRepository>, GitHubAppDbError> {
|
||||
let repos = sqlx::query_as!(
|
||||
GitHubAppRepository,
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
installation_id,
|
||||
github_repo_id,
|
||||
repo_full_name,
|
||||
created_at
|
||||
FROM github_app_repositories
|
||||
WHERE installation_id = $1
|
||||
ORDER BY repo_full_name
|
||||
"#,
|
||||
installation_id
|
||||
)
|
||||
.fetch_all(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(repos)
|
||||
}
|
||||
|
||||
pub async fn add_repositories(
|
||||
&self,
|
||||
installation_id: Uuid,
|
||||
repos: &[(i64, String)],
|
||||
) -> Result<(), GitHubAppDbError> {
|
||||
for (github_repo_id, repo_full_name) in repos {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
INSERT INTO github_app_repositories (installation_id, github_repo_id, repo_full_name)
|
||||
VALUES ($1, $2, $3)
|
||||
ON CONFLICT (installation_id, github_repo_id) DO UPDATE SET
|
||||
repo_full_name = EXCLUDED.repo_full_name
|
||||
"#,
|
||||
installation_id,
|
||||
github_repo_id,
|
||||
repo_full_name
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn remove_repositories(
|
||||
&self,
|
||||
installation_id: Uuid,
|
||||
github_repo_ids: &[i64],
|
||||
) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_repositories
|
||||
WHERE installation_id = $1 AND github_repo_id = ANY($2)
|
||||
"#,
|
||||
installation_id,
|
||||
github_repo_ids
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ========== Pending Installations ==========
|
||||
|
||||
pub async fn create_pending(
|
||||
&self,
|
||||
organization_id: Uuid,
|
||||
user_id: Uuid,
|
||||
state_token: &str,
|
||||
expires_at: DateTime<Utc>,
|
||||
) -> Result<PendingInstallation, GitHubAppDbError> {
|
||||
// Delete any existing pending installation for this org
|
||||
sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_pending_installations
|
||||
WHERE organization_id = $1
|
||||
"#,
|
||||
organization_id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
let pending = sqlx::query_as!(
|
||||
PendingInstallation,
|
||||
r#"
|
||||
INSERT INTO github_app_pending_installations (organization_id, user_id, state_token, expires_at)
|
||||
VALUES ($1, $2, $3, $4)
|
||||
RETURNING
|
||||
id,
|
||||
organization_id,
|
||||
user_id,
|
||||
state_token,
|
||||
expires_at,
|
||||
created_at
|
||||
"#,
|
||||
organization_id,
|
||||
user_id,
|
||||
state_token,
|
||||
expires_at
|
||||
)
|
||||
.fetch_one(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(pending)
|
||||
}
|
||||
|
||||
pub async fn get_pending_by_state(
|
||||
&self,
|
||||
state_token: &str,
|
||||
) -> Result<Option<PendingInstallation>, GitHubAppDbError> {
|
||||
let pending = sqlx::query_as!(
|
||||
PendingInstallation,
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
organization_id,
|
||||
user_id,
|
||||
state_token,
|
||||
expires_at,
|
||||
created_at
|
||||
FROM github_app_pending_installations
|
||||
WHERE state_token = $1 AND expires_at > NOW()
|
||||
"#,
|
||||
state_token
|
||||
)
|
||||
.fetch_optional(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(pending)
|
||||
}
|
||||
|
||||
pub async fn delete_pending(&self, state_token: &str) -> Result<(), GitHubAppDbError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_pending_installations
|
||||
WHERE state_token = $1
|
||||
"#,
|
||||
state_token
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn cleanup_expired_pending(&self) -> Result<u64, GitHubAppDbError> {
|
||||
let result = sqlx::query!(
|
||||
r#"
|
||||
DELETE FROM github_app_pending_installations
|
||||
WHERE expires_at < NOW()
|
||||
"#
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
pub mod auth;
|
||||
pub mod github_app;
|
||||
pub mod identity_errors;
|
||||
pub mod invitations;
|
||||
pub mod oauth;
|
||||
@@ -6,6 +7,7 @@ pub mod oauth_accounts;
|
||||
pub mod organization_members;
|
||||
pub mod organizations;
|
||||
pub mod projects;
|
||||
pub mod reviews;
|
||||
pub mod tasks;
|
||||
pub mod users;
|
||||
|
||||
|
||||
252
crates/remote/src/db/reviews.rs
Normal file
252
crates/remote/src/db/reviews.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use std::net::IpAddr;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use ipnetwork::IpNetwork;
|
||||
use serde::Serialize;
|
||||
use sqlx::{PgPool, query_as};
|
||||
use thiserror::Error;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ReviewError {
|
||||
#[error("review not found")]
|
||||
NotFound,
|
||||
#[error(transparent)]
|
||||
Database(#[from] sqlx::Error),
|
||||
}
|
||||
|
||||
/// One row of the `reviews` table: a requested PR code review and its state.
#[derive(Debug, Clone, sqlx::FromRow, Serialize)]
pub struct Review {
    pub id: Uuid,
    /// Full GitHub PR URL (e.g. "https://github.com/{owner}/{repo}/pull/{n}").
    pub gh_pr_url: String,
    /// Session id supplied by the CLI flow; not set for webhook reviews.
    pub claude_code_session_id: Option<String>,
    /// Submitter IP (CLI flow only); consumed by the rate-limiting query.
    pub ip_address: Option<IpNetwork>,
    /// Cached review output, if any. NOTE(review): the shape of this JSON is
    /// not visible here — confirm against the worker callback handler.
    pub review_cache: Option<serde_json::Value>,
    pub last_viewed_at: Option<DateTime<Utc>>,
    /// Object key of the uploaded codebase tarball in R2.
    pub r2_path: String,
    /// Soft-delete marker; queries treat a non-NULL value as "gone".
    pub deleted_at: Option<DateTime<Utc>>,
    pub created_at: DateTime<Utc>,
    /// Submitter email (CLI flow only).
    pub email: Option<String>,
    pub pr_title: String,
    /// Lifecycle state; 'completed' / 'failed' are written by the `mark_*`
    /// helpers. Initial value comes from the DB default (not visible here).
    pub status: String,
    // Webhook-specific fields
    /// GitHub App installation that triggered this review (webhook flow).
    pub github_installation_id: Option<i64>,
    pub pr_owner: Option<String>,
    pub pr_repo: Option<String>,
    pub pr_number: Option<i32>,
}
|
||||
|
||||
impl Review {
|
||||
/// Returns true if this review was triggered by a GitHub webhook
|
||||
pub fn is_webhook_review(&self) -> bool {
|
||||
self.github_installation_id.is_some()
|
||||
}
|
||||
}
|
||||
|
||||
/// Parameters for creating a new review (CLI-triggered)
pub struct CreateReviewParams<'a> {
    /// Caller-chosen review id (primary key).
    pub id: Uuid,
    pub gh_pr_url: &'a str,
    pub claude_code_session_id: Option<&'a str>,
    /// Submitter address, stored for rate limiting.
    pub ip_address: IpAddr,
    /// R2 object key of the uploaded codebase tarball.
    pub r2_path: &'a str,
    pub email: &'a str,
    pub pr_title: &'a str,
}
|
||||
|
||||
/// Parameters for creating a webhook-triggered review
/// (no submitter email or IP is available in this flow).
pub struct CreateWebhookReviewParams<'a> {
    /// Caller-chosen review id (primary key).
    pub id: Uuid,
    pub gh_pr_url: &'a str,
    /// R2 object key of the uploaded codebase tarball.
    pub r2_path: &'a str,
    pub pr_title: &'a str,
    /// GitHub App installation that delivered the triggering webhook.
    pub github_installation_id: i64,
    pub pr_owner: &'a str,
    pub pr_repo: &'a str,
    pub pr_number: i32,
}
|
||||
|
||||
/// Data-access wrapper around the `reviews` table, borrowing a shared pool.
pub struct ReviewRepository<'a> {
    pool: &'a PgPool,
}
|
||||
|
||||
impl<'a> ReviewRepository<'a> {
|
||||
pub fn new(pool: &'a PgPool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
pub async fn create(&self, params: CreateReviewParams<'_>) -> Result<Review, ReviewError> {
|
||||
let ip_network = IpNetwork::from(params.ip_address);
|
||||
|
||||
query_as!(
|
||||
Review,
|
||||
r#"
|
||||
INSERT INTO reviews (id, gh_pr_url, claude_code_session_id, ip_address, r2_path, email, pr_title)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
RETURNING
|
||||
id,
|
||||
gh_pr_url,
|
||||
claude_code_session_id,
|
||||
ip_address AS "ip_address: IpNetwork",
|
||||
review_cache,
|
||||
last_viewed_at,
|
||||
r2_path,
|
||||
deleted_at,
|
||||
created_at,
|
||||
email,
|
||||
pr_title,
|
||||
status,
|
||||
github_installation_id,
|
||||
pr_owner,
|
||||
pr_repo,
|
||||
pr_number
|
||||
"#,
|
||||
params.id,
|
||||
params.gh_pr_url,
|
||||
params.claude_code_session_id,
|
||||
ip_network,
|
||||
params.r2_path,
|
||||
params.email,
|
||||
params.pr_title
|
||||
)
|
||||
.fetch_one(self.pool)
|
||||
.await
|
||||
.map_err(ReviewError::from)
|
||||
}
|
||||
|
||||
/// Create a webhook-triggered review (no email/IP)
|
||||
pub async fn create_webhook_review(
|
||||
&self,
|
||||
params: CreateWebhookReviewParams<'_>,
|
||||
) -> Result<Review, ReviewError> {
|
||||
query_as!(
|
||||
Review,
|
||||
r#"
|
||||
INSERT INTO reviews (id, gh_pr_url, r2_path, pr_title, github_installation_id, pr_owner, pr_repo, pr_number)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||
RETURNING
|
||||
id,
|
||||
gh_pr_url,
|
||||
claude_code_session_id,
|
||||
ip_address AS "ip_address: IpNetwork",
|
||||
review_cache,
|
||||
last_viewed_at,
|
||||
r2_path,
|
||||
deleted_at,
|
||||
created_at,
|
||||
email,
|
||||
pr_title,
|
||||
status,
|
||||
github_installation_id,
|
||||
pr_owner,
|
||||
pr_repo,
|
||||
pr_number
|
||||
"#,
|
||||
params.id,
|
||||
params.gh_pr_url,
|
||||
params.r2_path,
|
||||
params.pr_title,
|
||||
params.github_installation_id,
|
||||
params.pr_owner,
|
||||
params.pr_repo,
|
||||
params.pr_number
|
||||
)
|
||||
.fetch_one(self.pool)
|
||||
.await
|
||||
.map_err(ReviewError::from)
|
||||
}
|
||||
|
||||
/// Get a review by its ID.
|
||||
/// Returns NotFound if the review doesn't exist or has been deleted.
|
||||
pub async fn get_by_id(&self, id: Uuid) -> Result<Review, ReviewError> {
|
||||
query_as!(
|
||||
Review,
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
gh_pr_url,
|
||||
claude_code_session_id,
|
||||
ip_address AS "ip_address: IpNetwork",
|
||||
review_cache,
|
||||
last_viewed_at,
|
||||
r2_path,
|
||||
deleted_at,
|
||||
created_at,
|
||||
email,
|
||||
pr_title,
|
||||
status,
|
||||
github_installation_id,
|
||||
pr_owner,
|
||||
pr_repo,
|
||||
pr_number
|
||||
FROM reviews
|
||||
WHERE id = $1 AND deleted_at IS NULL
|
||||
"#,
|
||||
id
|
||||
)
|
||||
.fetch_optional(self.pool)
|
||||
.await?
|
||||
.ok_or(ReviewError::NotFound)
|
||||
}
|
||||
|
||||
/// Count reviews from an IP address since a given timestamp.
|
||||
/// Used for rate limiting.
|
||||
pub async fn count_since(
|
||||
&self,
|
||||
ip_address: IpAddr,
|
||||
since: DateTime<Utc>,
|
||||
) -> Result<i64, ReviewError> {
|
||||
let ip_network = IpNetwork::from(ip_address);
|
||||
|
||||
let result = sqlx::query!(
|
||||
r#"
|
||||
SELECT COUNT(*) as "count!"
|
||||
FROM reviews
|
||||
WHERE ip_address = $1
|
||||
AND created_at > $2
|
||||
AND deleted_at IS NULL
|
||||
"#,
|
||||
ip_network,
|
||||
since
|
||||
)
|
||||
.fetch_one(self.pool)
|
||||
.await
|
||||
.map_err(ReviewError::from)?;
|
||||
|
||||
Ok(result.count)
|
||||
}
|
||||
|
||||
/// Mark a review as completed
|
||||
pub async fn mark_completed(&self, id: Uuid) -> Result<(), ReviewError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE reviews
|
||||
SET status = 'completed'
|
||||
WHERE id = $1 AND deleted_at IS NULL
|
||||
"#,
|
||||
id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(ReviewError::from)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Mark a review as failed
|
||||
pub async fn mark_failed(&self, id: Uuid) -> Result<(), ReviewError> {
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE reviews
|
||||
SET status = 'failed'
|
||||
WHERE id = $1 AND deleted_at IS NULL
|
||||
"#,
|
||||
id
|
||||
)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(ReviewError::from)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
97
crates/remote/src/github_app/jwt.rs
Normal file
97
crates/remote/src/github_app/jwt.rs
Normal file
@@ -0,0 +1,97 @@
|
||||
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
|
||||
use jsonwebtoken::{Algorithm, EncodingKey, Header, encode};
|
||||
use secrecy::{ExposeSecret, SecretString};
|
||||
use serde::Serialize;
|
||||
use thiserror::Error;
|
||||
|
||||
/// JWT generator for GitHub App authentication.
/// GitHub Apps authenticate using RS256-signed JWTs with a 10-minute max TTL.
#[derive(Clone)]
pub struct GitHubAppJwt {
    /// Numeric GitHub App id, emitted as the `iss` claim.
    app_id: u64,
    /// Decoded (plain-text) PEM private key, validated at construction.
    private_key_pem: SecretString,
}
|
||||
|
||||
/// Failures while constructing or signing a GitHub App JWT.
#[derive(Debug, Error)]
pub enum JwtError {
    /// The decoded PEM could not back an RSA signing key.
    #[error("invalid private key: {0}")]
    InvalidPrivateKey(String),
    #[error("failed to encode JWT: {0}")]
    EncodingError(#[from] jsonwebtoken::errors::Error),
    /// The configured key material was not valid base64.
    #[error("invalid base64 encoding")]
    Base64Error,
}
|
||||
|
||||
/// Claim set GitHub requires in an app-authentication JWT.
#[derive(Debug, Serialize)]
struct GitHubAppClaims {
    /// Issuer - the GitHub App ID
    iss: String,
    /// Issued at (Unix timestamp)
    iat: i64,
    /// Expiration (Unix timestamp) - max 10 minutes from iat
    exp: i64,
}
|
||||
|
||||
impl GitHubAppJwt {
|
||||
/// Create a new JWT generator from base64-encoded PEM private key
|
||||
pub fn new(app_id: u64, private_key_base64: SecretString) -> Result<Self, JwtError> {
|
||||
// Decode base64 to get raw PEM
|
||||
let pem_bytes = BASE64_STANDARD
|
||||
.decode(private_key_base64.expose_secret().as_bytes())
|
||||
.map_err(|_| JwtError::Base64Error)?;
|
||||
|
||||
let pem_string = String::from_utf8(pem_bytes)
|
||||
.map_err(|_| JwtError::InvalidPrivateKey("PEM is not valid UTF-8".to_string()))?;
|
||||
|
||||
// Validate we can parse this as an RSA key
|
||||
EncodingKey::from_rsa_pem(pem_string.as_bytes())
|
||||
.map_err(|e| JwtError::InvalidPrivateKey(e.to_string()))?;
|
||||
|
||||
Ok(Self {
|
||||
app_id,
|
||||
private_key_pem: SecretString::new(pem_string.into()),
|
||||
})
|
||||
}
|
||||
|
||||
/// Generate a JWT for authenticating as the GitHub App.
|
||||
/// This JWT is used to get installation access tokens.
|
||||
/// Max TTL is 10 minutes as per GitHub's requirements.
|
||||
pub fn generate(&self) -> Result<String, JwtError> {
|
||||
let now = chrono::Utc::now().timestamp();
|
||||
// Subtract 60 seconds from iat to account for clock drift
|
||||
let iat = now - 60;
|
||||
// GitHub allows max 10 minutes, we use 9 to be safe
|
||||
let exp = now + (9 * 60);
|
||||
|
||||
let claims = GitHubAppClaims {
|
||||
iss: self.app_id.to_string(),
|
||||
iat,
|
||||
exp,
|
||||
};
|
||||
|
||||
let header = Header::new(Algorithm::RS256);
|
||||
let key = EncodingKey::from_rsa_pem(self.private_key_pem.expose_secret().as_bytes())?;
|
||||
|
||||
encode(&header, &claims, &key).map_err(JwtError::EncodingError)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Test with a dummy key - in real tests you'd use a proper test key

    #[test]
    fn test_invalid_base64_fails() {
        // Input that cannot be base64-decoded must be rejected up front.
        let outcome = GitHubAppJwt::new(12345, SecretString::new("not-valid-base64!!!".into()));
        assert!(matches!(outcome, Err(JwtError::Base64Error)));
    }

    #[test]
    fn test_invalid_pem_fails() {
        // Well-formed base64 wrapping junk must fail PEM validation instead.
        let bogus_pem_b64 = BASE64_STANDARD.encode("not a real pem key");
        let outcome = GitHubAppJwt::new(12345, SecretString::new(bogus_pem_b64.into()));
        assert!(matches!(outcome, Err(JwtError::InvalidPrivateKey(_))));
    }
}
|
||||
9
crates/remote/src/github_app/mod.rs
Normal file
9
crates/remote/src/github_app/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
mod jwt;
|
||||
mod pr_review;
|
||||
mod service;
|
||||
mod webhook;
|
||||
|
||||
pub use jwt::GitHubAppJwt;
|
||||
pub use pr_review::{PrReviewError, PrReviewParams, PrReviewService};
|
||||
pub use service::{GitHubAppService, InstallationInfo, PrDetails, PrRef, Repository};
|
||||
pub use webhook::verify_webhook_signature;
|
||||
243
crates/remote/src/github_app/pr_review.rs
Normal file
243
crates/remote/src/github_app/pr_review.rs
Normal file
@@ -0,0 +1,243 @@
|
||||
//! PR Review service for webhook-triggered code reviews.
|
||||
|
||||
use std::{fs::File, path::Path};
|
||||
|
||||
use flate2::{Compression, write::GzEncoder};
|
||||
use reqwest::Client;
|
||||
use sqlx::PgPool;
|
||||
use tar::Builder;
|
||||
use thiserror::Error;
|
||||
use tracing::{debug, error, info};
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::service::{GitHubAppError, GitHubAppService};
|
||||
use crate::{
|
||||
db::reviews::{CreateWebhookReviewParams, ReviewError, ReviewRepository},
|
||||
r2::{R2Error, R2Service},
|
||||
};
|
||||
|
||||
/// Parameters extracted from webhook payload for PR review
#[derive(Debug, Clone)]
pub struct PrReviewParams {
    /// GitHub App installation that delivered the webhook.
    pub installation_id: i64,
    pub owner: String,
    pub repo: String,
    pub pr_number: u64,
    pub pr_title: String,
    pub pr_body: String,
    /// Commit that gets cloned, archived, and reviewed.
    pub head_sha: String,
    /// Base commit handed to the worker as `baseCommit`.
    pub base_sha: String,
}
|
||||
|
||||
/// Failures across the webhook-review pipeline, one variant per stage.
#[derive(Debug, Error)]
pub enum PrReviewError {
    #[error("GitHub error: {0}")]
    GitHub(#[from] GitHubAppError),
    #[error("R2 error: {0}")]
    R2(#[from] R2Error),
    #[error("Database error: {0}")]
    Database(#[from] ReviewError),
    /// Tarball creation failed (stringified io error).
    #[error("Archive error: {0}")]
    Archive(String),
    /// The review worker could not be reached or returned non-2xx.
    #[error("Worker error: {0}")]
    Worker(String),
}
|
||||
|
||||
/// Service for processing webhook-triggered PR reviews
pub struct PrReviewService {
    github_app: GitHubAppService,
    r2: R2Service,
    http_client: Client,
    /// Base URL of the review worker; also currently doubles as the public
    /// base for R2 codebase URLs (see `r2_public_url`).
    worker_base_url: String,
    /// Public base URL of this server, used to build worker callback URLs.
    server_base_url: String,
}
|
||||
|
||||
impl PrReviewService {
    /// Assemble the service from its collaborators; no I/O happens here.
    pub fn new(
        github_app: GitHubAppService,
        r2: R2Service,
        http_client: Client,
        worker_base_url: String,
        server_base_url: String,
    ) -> Self {
        Self {
            github_app,
            r2,
            http_client,
            worker_base_url,
            server_base_url,
        }
    }

    /// Process a PR review from webhook.
    ///
    /// This will:
    /// 1. Clone the repository at the PR head commit
    /// 2. Create a tarball of the repository
    /// 3. Upload the tarball to R2
    /// 4. Create a review record in the database
    /// 5. Start the review worker
    ///
    /// Returns the review ID on success.
    ///
    /// NOTE(review): steps run strictly in this order; if the worker call in
    /// step 5 fails, the row from step 4 remains in its initial status with
    /// no compensating cleanup — confirm that is the intended behavior.
    pub async fn process_pr_review(
        &self,
        pool: &PgPool,
        params: PrReviewParams,
    ) -> Result<Uuid, PrReviewError> {
        let review_id = Uuid::new_v4();

        info!(
            review_id = %review_id,
            owner = %params.owner,
            repo = %params.repo,
            pr_number = params.pr_number,
            "Starting webhook PR review"
        );

        // 1. Clone the repository (temp dir is removed when `temp_dir` drops,
        // i.e. as soon as the tarball has been built below).
        let temp_dir = self
            .github_app
            .clone_repo(
                params.installation_id,
                &params.owner,
                &params.repo,
                &params.head_sha,
            )
            .await?;

        debug!(review_id = %review_id, "Repository cloned");

        // 2. Create tarball
        let tarball =
            create_tarball(temp_dir.path()).map_err(|e| PrReviewError::Archive(e.to_string()))?;

        let tarball_size_mb = tarball.len() as f64 / 1_048_576.0;
        debug!(review_id = %review_id, size_mb = tarball_size_mb, "Tarball created");

        // 3. Upload to R2
        let r2_path = self.r2.upload_bytes(review_id, tarball).await?;
        debug!(review_id = %review_id, r2_path = %r2_path, "Uploaded to R2");

        // 4. Create review record in database
        let gh_pr_url = format!(
            "https://github.com/{}/{}/pull/{}",
            params.owner, params.repo, params.pr_number
        );

        let repo = ReviewRepository::new(pool);
        repo.create_webhook_review(CreateWebhookReviewParams {
            id: review_id,
            gh_pr_url: &gh_pr_url,
            r2_path: &r2_path,
            pr_title: &params.pr_title,
            github_installation_id: params.installation_id,
            pr_owner: &params.owner,
            pr_repo: &params.repo,
            // NOTE(review): `as i32` silently wraps above i32::MAX — not
            // reachable for real PR numbers, but worth confirming.
            pr_number: params.pr_number as i32,
        })
        .await?;

        debug!(review_id = %review_id, "Review record created");

        // 5. Start the review worker
        let codebase_url = format!(
            "{}/reviews/{}/payload.tar.gz",
            self.r2_public_url(),
            review_id
        );
        let callback_url = format!("{}/review/{}", self.server_base_url, review_id);

        // Payload contract with the worker's /review/start endpoint.
        let start_request = serde_json::json!({
            "id": review_id.to_string(),
            "title": params.pr_title,
            "description": params.pr_body,
            "org": params.owner,
            "repo": params.repo,
            "codebaseUrl": codebase_url,
            "baseCommit": params.base_sha,
            "callbackUrl": callback_url,
        });

        let response = self
            .http_client
            .post(format!("{}/review/start", self.worker_base_url))
            .json(&start_request)
            .send()
            .await
            .map_err(|e| PrReviewError::Worker(format!("Failed to call worker: {e}")))?;

        if !response.status().is_success() {
            let status = response.status();
            let body = response.text().await.unwrap_or_default();
            error!(review_id = %review_id, status = %status, body = %body, "Worker returned error");
            return Err(PrReviewError::Worker(format!(
                "Worker returned {}: {}",
                status, body
            )));
        }

        info!(review_id = %review_id, "Review worker started successfully");

        Ok(review_id)
    }

    /// Get the public URL for R2 (used to construct codebase URLs for the worker).
    /// This assumes the R2 bucket has public read access configured.
    ///
    /// NOTE(review): this returns `worker_base_url`, not a real R2 endpoint —
    /// an acknowledged placeholder; verify before production use.
    fn r2_public_url(&self) -> &str {
        // The worker needs to be able to fetch the tarball from R2.
        // This is typically configured via a public bucket URL or CDN.
        // For now, we'll use the worker base URL as a proxy assumption.
        // In production, this should be configured separately.
        &self.worker_base_url
    }
}
|
||||
|
||||
/// Create a tar.gz archive from a directory
|
||||
fn create_tarball(source_dir: &Path) -> Result<Vec<u8>, std::io::Error> {
|
||||
debug!("Creating tarball from {}", source_dir.display());
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
|
||||
{
|
||||
let encoder = GzEncoder::new(&mut buffer, Compression::default());
|
||||
let mut archive = Builder::new(encoder);
|
||||
|
||||
add_directory_to_archive(&mut archive, source_dir, source_dir)?;
|
||||
|
||||
let encoder = archive.into_inner()?;
|
||||
encoder.finish()?;
|
||||
}
|
||||
|
||||
debug!("Created tarball: {} bytes", buffer.len());
|
||||
|
||||
Ok(buffer)
|
||||
}
|
||||
|
||||
fn add_directory_to_archive<W: std::io::Write>(
|
||||
archive: &mut Builder<W>,
|
||||
base_dir: &Path,
|
||||
current_dir: &Path,
|
||||
) -> Result<(), std::io::Error> {
|
||||
let entries = std::fs::read_dir(current_dir)?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
let relative_path = path.strip_prefix(base_dir).map_err(std::io::Error::other)?;
|
||||
|
||||
let metadata = entry.metadata()?;
|
||||
|
||||
if metadata.is_dir() {
|
||||
// Recursively add directory contents
|
||||
add_directory_to_archive(archive, base_dir, &path)?;
|
||||
} else if metadata.is_file() {
|
||||
// Add file to archive
|
||||
let mut file = File::open(&path)?;
|
||||
archive.append_file(relative_path, &mut file)?;
|
||||
}
|
||||
// Skip symlinks and other special files
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
387
crates/remote/src/github_app/service.rs
Normal file
387
crates/remote/src/github_app/service.rs
Normal file
@@ -0,0 +1,387 @@
|
||||
use reqwest::Client;
|
||||
use secrecy::SecretString;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tempfile::TempDir;
|
||||
use thiserror::Error;
|
||||
use tokio::process::Command;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
use super::jwt::{GitHubAppJwt, JwtError};
|
||||
use crate::config::GitHubAppConfig;
|
||||
|
||||
const USER_AGENT: &str = "VibeKanbanRemote/1.0";
|
||||
const GITHUB_API_BASE: &str = "https://api.github.com";
|
||||
|
||||
/// Failures from GitHub App API calls and related git operations.
#[derive(Debug, Error)]
pub enum GitHubAppError {
    #[error("JWT error: {0}")]
    Jwt(#[from] JwtError),
    #[error("HTTP request failed: {0}")]
    Http(#[from] reqwest::Error),
    /// Non-2xx response from the GitHub REST API, with status and raw body.
    #[error("GitHub API error: {status} - {message}")]
    Api { status: u16, message: String },
    #[error("Installation not found")]
    InstallationNotFound,
    /// A `git` subprocess failed or could not be spawned.
    #[error("Git operation failed: {0}")]
    GitOperation(String),
}
|
||||
|
||||
/// Information about a GitHub App installation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstallationInfo {
    pub id: i64,
    pub account: InstallationAccount,
    pub repository_selection: String, // "all" or "selected"
    /// Timestamp when the installation was suspended, if it is.
    pub suspended_at: Option<String>,
}
|
||||
|
||||
/// Owner (user or organization) of a GitHub App installation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstallationAccount {
    pub login: String,
    #[serde(rename = "type")]
    pub account_type: String, // "Organization" or "User"
    pub id: i64,
}
|
||||
|
||||
/// A repository accessible via an installation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Repository {
    pub id: i64,
    /// "owner/name" form as reported by GitHub.
    pub full_name: String,
    pub name: String,
    pub private: bool,
}
|
||||
|
||||
/// Subset of GitHub's installation-access-token response we consume.
#[derive(Debug, Deserialize)]
struct InstallationTokenResponse {
    token: String,
    /// Token expiry timestamp; currently only surfaced in logs.
    expires_at: String,
}
|
||||
|
||||
/// Envelope GitHub returns from `GET /installation/repositories`.
#[derive(Debug, Deserialize)]
struct RepositoriesResponse {
    repositories: Vec<Repository>,
}
|
||||
|
||||
/// Details about a pull request
#[derive(Debug, Clone, Deserialize)]
pub struct PrDetails {
    pub title: String,
    /// PR description; GitHub sends null when the body is empty.
    pub body: Option<String>,
    pub head: PrRef,
    pub base: PrRef,
}
|
||||
|
||||
/// A git ref (branch/commit) in a PR
#[derive(Debug, Clone, Deserialize)]
pub struct PrRef {
    /// Commit SHA the ref points at.
    pub sha: String,
}
|
||||
|
||||
/// Service for interacting with the GitHub App API
#[derive(Clone)]
pub struct GitHubAppService {
    /// Signs short-lived app JWTs used to mint installation tokens.
    jwt_generator: GitHubAppJwt,
    client: Client,
    /// App slug, exposed so callers can build installation URLs.
    app_slug: String,
    /// Shared secret for webhook signature verification.
    webhook_secret: SecretString,
}
|
||||
|
||||
impl GitHubAppService {
|
||||
pub fn new(config: &GitHubAppConfig, client: Client) -> Result<Self, GitHubAppError> {
|
||||
let jwt_generator = GitHubAppJwt::new(config.app_id, config.private_key.clone())?;
|
||||
|
||||
Ok(Self {
|
||||
jwt_generator,
|
||||
client,
|
||||
app_slug: config.app_slug.clone(),
|
||||
webhook_secret: config.webhook_secret.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the app slug for constructing installation URLs
|
||||
pub fn app_slug(&self) -> &str {
|
||||
&self.app_slug
|
||||
}
|
||||
|
||||
/// Get the webhook secret for signature verification
|
||||
pub fn webhook_secret(&self) -> &SecretString {
|
||||
&self.webhook_secret
|
||||
}
|
||||
|
||||
/// Get an installation access token for making API calls on behalf of an installation
|
||||
pub async fn get_installation_token(
|
||||
&self,
|
||||
installation_id: i64,
|
||||
) -> Result<String, GitHubAppError> {
|
||||
let jwt = self.jwt_generator.generate()?;
|
||||
|
||||
let url = format!(
|
||||
"{}/app/installations/{}/access_tokens",
|
||||
GITHUB_API_BASE, installation_id
|
||||
);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(&url)
|
||||
.header("Authorization", format!("Bearer {}", jwt))
|
||||
.header("Accept", "application/vnd.github+json")
|
||||
.header("User-Agent", USER_AGENT)
|
||||
.header("X-GitHub-Api-Version", "2022-11-28")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status().as_u16();
|
||||
let message = response.text().await.unwrap_or_default();
|
||||
warn!(
|
||||
installation_id,
|
||||
status, message, "Failed to get installation token"
|
||||
);
|
||||
return Err(GitHubAppError::Api { status, message });
|
||||
}
|
||||
|
||||
let token_response: InstallationTokenResponse = response.json().await?;
|
||||
info!(
|
||||
installation_id,
|
||||
expires_at = %token_response.expires_at,
|
||||
"Got installation access token"
|
||||
);
|
||||
|
||||
Ok(token_response.token)
|
||||
}
|
||||
|
||||
/// Get details about a specific installation
|
||||
pub async fn get_installation(
|
||||
&self,
|
||||
installation_id: i64,
|
||||
) -> Result<InstallationInfo, GitHubAppError> {
|
||||
let jwt = self.jwt_generator.generate()?;
|
||||
|
||||
let url = format!("{}/app/installations/{}", GITHUB_API_BASE, installation_id);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("Authorization", format!("Bearer {}", jwt))
|
||||
.header("Accept", "application/vnd.github+json")
|
||||
.header("User-Agent", USER_AGENT)
|
||||
.header("X-GitHub-Api-Version", "2022-11-28")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status() == reqwest::StatusCode::NOT_FOUND {
|
||||
return Err(GitHubAppError::InstallationNotFound);
|
||||
}
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status().as_u16();
|
||||
let message = response.text().await.unwrap_or_default();
|
||||
return Err(GitHubAppError::Api { status, message });
|
||||
}
|
||||
|
||||
let installation: InstallationInfo = response.json().await?;
|
||||
Ok(installation)
|
||||
}
|
||||
|
||||
/// List repositories accessible to an installation
|
||||
pub async fn list_installation_repos(
|
||||
&self,
|
||||
installation_id: i64,
|
||||
) -> Result<Vec<Repository>, GitHubAppError> {
|
||||
let token = self.get_installation_token(installation_id).await?;
|
||||
|
||||
let url = format!("{}/installation/repositories", GITHUB_API_BASE);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("Authorization", format!("Bearer {}", token))
|
||||
.header("Accept", "application/vnd.github+json")
|
||||
.header("User-Agent", USER_AGENT)
|
||||
.header("X-GitHub-Api-Version", "2022-11-28")
|
||||
.query(&[("per_page", "100")])
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status().as_u16();
|
||||
let message = response.text().await.unwrap_or_default();
|
||||
return Err(GitHubAppError::Api { status, message });
|
||||
}
|
||||
|
||||
let repos_response: RepositoriesResponse = response.json().await?;
|
||||
Ok(repos_response.repositories)
|
||||
}
|
||||
|
||||
/// Post a comment on a pull request
|
||||
pub async fn post_pr_comment(
|
||||
&self,
|
||||
installation_id: i64,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
pr_number: u64,
|
||||
body: &str,
|
||||
) -> Result<(), GitHubAppError> {
|
||||
let token = self.get_installation_token(installation_id).await?;
|
||||
|
||||
// Use the issues API to post comments (PRs are issues in GitHub)
|
||||
let url = format!(
|
||||
"{}/repos/{}/{}/issues/{}/comments",
|
||||
GITHUB_API_BASE, owner, repo, pr_number
|
||||
);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(&url)
|
||||
.header("Authorization", format!("Bearer {}", token))
|
||||
.header("Accept", "application/vnd.github+json")
|
||||
.header("User-Agent", USER_AGENT)
|
||||
.header("X-GitHub-Api-Version", "2022-11-28")
|
||||
.json(&serde_json::json!({ "body": body }))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status().as_u16();
|
||||
let message = response.text().await.unwrap_or_default();
|
||||
warn!(
|
||||
owner,
|
||||
repo, pr_number, status, message, "Failed to post PR comment"
|
||||
);
|
||||
return Err(GitHubAppError::Api { status, message });
|
||||
}
|
||||
|
||||
info!(owner, repo, pr_number, "Posted PR comment");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clone a repository using the installation token for authentication.
|
||||
///
|
||||
/// Returns a TempDir containing the cloned repository at the specified commit.
|
||||
/// The TempDir will be automatically cleaned up when dropped.
|
||||
pub async fn clone_repo(
|
||||
&self,
|
||||
installation_id: i64,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
head_sha: &str,
|
||||
) -> Result<TempDir, GitHubAppError> {
|
||||
let token = self.get_installation_token(installation_id).await?;
|
||||
|
||||
// Create temp directory
|
||||
let temp_dir = tempfile::tempdir()
|
||||
.map_err(|e| GitHubAppError::GitOperation(format!("Failed to create temp dir: {e}")))?;
|
||||
|
||||
let clone_url = format!(
|
||||
"https://x-access-token:{}@github.com/{}/{}.git",
|
||||
token, owner, repo
|
||||
);
|
||||
|
||||
debug!(owner, repo, head_sha, "Cloning repository");
|
||||
|
||||
// Clone the repository
|
||||
let output = Command::new("git")
|
||||
.args(["clone", "--depth", "1", &clone_url, "."])
|
||||
.current_dir(temp_dir.path())
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
GitHubAppError::GitOperation("git is not installed or not in PATH".to_string())
|
||||
} else {
|
||||
GitHubAppError::GitOperation(format!("Failed to run git clone: {e}"))
|
||||
}
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
// Redact the token from error messages
|
||||
let redacted_stderr = stderr.replace(&token, "[REDACTED]");
|
||||
return Err(GitHubAppError::GitOperation(format!(
|
||||
"git clone failed: {redacted_stderr}"
|
||||
)));
|
||||
}
|
||||
|
||||
// Fetch the specific commit (in case it's not in shallow clone)
|
||||
let output = Command::new("git")
|
||||
.args(["fetch", "--depth", "1", "origin", head_sha])
|
||||
.current_dir(temp_dir.path())
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
GitHubAppError::GitOperation("git is not installed or not in PATH".to_string())
|
||||
} else {
|
||||
GitHubAppError::GitOperation(format!("Failed to run git fetch: {e}"))
|
||||
}
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let redacted_stderr = stderr.replace(&token, "[REDACTED]");
|
||||
return Err(GitHubAppError::GitOperation(format!(
|
||||
"git fetch failed: {redacted_stderr}"
|
||||
)));
|
||||
}
|
||||
|
||||
// Checkout the specific commit
|
||||
let output = Command::new("git")
|
||||
.args(["checkout", head_sha])
|
||||
.current_dir(temp_dir.path())
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
GitHubAppError::GitOperation("git is not installed or not in PATH".to_string())
|
||||
} else {
|
||||
GitHubAppError::GitOperation(format!("Failed to run git checkout: {e}"))
|
||||
}
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(GitHubAppError::GitOperation(format!(
|
||||
"git checkout failed: {stderr}"
|
||||
)));
|
||||
}
|
||||
|
||||
info!(owner, repo, head_sha, "Repository cloned successfully");
|
||||
Ok(temp_dir)
|
||||
}
|
||||
|
||||
/// Get details about a pull request
|
||||
pub async fn get_pr_details(
|
||||
&self,
|
||||
installation_id: i64,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
pr_number: u64,
|
||||
) -> Result<PrDetails, GitHubAppError> {
|
||||
let token = self.get_installation_token(installation_id).await?;
|
||||
|
||||
let url = format!(
|
||||
"{}/repos/{}/{}/pulls/{}",
|
||||
GITHUB_API_BASE, owner, repo, pr_number
|
||||
);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("Authorization", format!("Bearer {}", token))
|
||||
.header("Accept", "application/vnd.github+json")
|
||||
.header("User-Agent", USER_AGENT)
|
||||
.header("X-GitHub-Api-Version", "2022-11-28")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status().as_u16();
|
||||
let message = response.text().await.unwrap_or_default();
|
||||
return Err(GitHubAppError::Api { status, message });
|
||||
}
|
||||
|
||||
let pr: PrDetails = response.json().await?;
|
||||
Ok(pr)
|
||||
}
|
||||
}
|
||||
80
crates/remote/src/github_app/webhook.rs
Normal file
80
crates/remote/src/github_app/webhook.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
use hmac::{Hmac, Mac};
|
||||
use sha2::Sha256;
|
||||
use subtle::ConstantTimeEq;
|
||||
|
||||
type HmacSha256 = Hmac<Sha256>;
|
||||
|
||||
/// Verify a GitHub webhook signature.
|
||||
///
|
||||
/// GitHub sends the HMAC-SHA256 signature in the `X-Hub-Signature-256` header
|
||||
/// in the format `sha256=<hex-signature>`.
|
||||
///
|
||||
/// Returns true if the signature is valid.
|
||||
pub fn verify_webhook_signature(secret: &[u8], signature_header: &str, payload: &[u8]) -> bool {
|
||||
// Extract the hex signature from the header
|
||||
let Some(hex_signature) = signature_header.strip_prefix("sha256=") else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// Decode the hex signature
|
||||
let Ok(expected_signature) = hex::decode(hex_signature) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
// Compute HMAC-SHA256
|
||||
let Ok(mut mac) = HmacSha256::new_from_slice(secret) else {
|
||||
return false;
|
||||
};
|
||||
mac.update(payload);
|
||||
let computed_signature = mac.finalize().into_bytes();
|
||||
|
||||
// Constant-time comparison to prevent timing attacks
|
||||
computed_signature[..].ct_eq(&expected_signature).into()
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_valid_signature() {
        let secret = b"test-secret";
        let payload = b"test payload";

        // A header built from the real HMAC must verify.
        let mut mac = HmacSha256::new_from_slice(secret).unwrap();
        mac.update(payload);
        let digest = mac.finalize().into_bytes();
        let header = format!("sha256={}", hex::encode(digest));

        assert!(verify_webhook_signature(secret, &header, payload));
    }

    #[test]
    fn test_invalid_signature() {
        let secret = b"test-secret";
        let payload = b"test payload";

        // Well-formed hex that does not match the HMAC must be rejected.
        let bogus_header =
            "sha256=0000000000000000000000000000000000000000000000000000000000000000";

        assert!(!verify_webhook_signature(secret, bogus_header, payload));
    }

    #[test]
    fn test_missing_prefix() {
        let secret = b"test-secret";
        let payload = b"test payload";

        // Headers without the "sha256=" scheme prefix must be rejected.
        let bare_hex = "0000000000000000000000000000000000000000000000000000000000000000";

        assert!(!verify_webhook_signature(secret, bare_hex, payload));
    }

    #[test]
    fn test_invalid_hex() {
        let secret = b"test-secret";
        let payload = b"test payload";

        // Non-hex signature bytes must be rejected.
        let malformed = "sha256=not-valid-hex";

        assert!(!verify_webhook_signature(secret, malformed, payload));
    }
}
|
||||
@@ -2,7 +2,9 @@ mod app;
|
||||
mod auth;
|
||||
pub mod config;
|
||||
pub mod db;
|
||||
pub mod github_app;
|
||||
pub mod mail;
|
||||
pub mod r2;
|
||||
pub mod routes;
|
||||
mod state;
|
||||
pub mod validated_where;
|
||||
|
||||
@@ -6,6 +6,8 @@ use serde_json::json;
|
||||
use crate::db::organization_members::MemberRole;
|
||||
|
||||
const LOOPS_INVITE_TEMPLATE_ID: &str = "cmhvy2wgs3s13z70i1pxakij9";
|
||||
const LOOPS_REVIEW_READY_TEMPLATE_ID: &str = "cmj47k5ge16990iylued9by17";
|
||||
const LOOPS_REVIEW_FAILED_TEMPLATE_ID: &str = "cmj49ougk1c8s0iznavijdqpo";
|
||||
|
||||
#[async_trait]
|
||||
pub trait Mailer: Send + Sync {
|
||||
@@ -17,6 +19,10 @@ pub trait Mailer: Send + Sync {
|
||||
role: MemberRole,
|
||||
invited_by: Option<&str>,
|
||||
);
|
||||
|
||||
async fn send_review_ready(&self, email: &str, review_url: &str, pr_name: &str);
|
||||
|
||||
async fn send_review_failed(&self, email: &str, pr_name: &str, review_id: &str);
|
||||
}
|
||||
|
||||
pub struct LoopsMailer {
|
||||
@@ -93,4 +99,86 @@ impl Mailer for LoopsMailer {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn send_review_ready(&self, email: &str, review_url: &str, pr_name: &str) {
|
||||
if cfg!(debug_assertions) {
|
||||
tracing::info!(
|
||||
"Sending review ready email to {email}\n\
|
||||
PR: {pr_name}\n\
|
||||
Review URL: {review_url}"
|
||||
);
|
||||
}
|
||||
|
||||
let payload = json!({
|
||||
"transactionalId": LOOPS_REVIEW_READY_TEMPLATE_ID,
|
||||
"email": email,
|
||||
"dataVariables": {
|
||||
"review_url": review_url,
|
||||
"pr_name": pr_name,
|
||||
}
|
||||
});
|
||||
|
||||
let res = self
|
||||
.client
|
||||
.post("https://app.loops.so/api/v1/transactional")
|
||||
.bearer_auth(&self.api_key)
|
||||
.json(&payload)
|
||||
.send()
|
||||
.await;
|
||||
|
||||
match res {
|
||||
Ok(resp) if resp.status().is_success() => {
|
||||
tracing::debug!("Review ready email sent via Loops to {email}");
|
||||
}
|
||||
Ok(resp) => {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
tracing::warn!(status = %status, body = %body, "Loops send failed for review ready");
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!(error = ?err, "Loops request error for review ready");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn send_review_failed(&self, email: &str, pr_name: &str, review_id: &str) {
|
||||
if cfg!(debug_assertions) {
|
||||
tracing::info!(
|
||||
"Sending review failed email to {email}\n\
|
||||
PR: {pr_name}\n\
|
||||
Review ID: {review_id}"
|
||||
);
|
||||
}
|
||||
|
||||
let payload = json!({
|
||||
"transactionalId": LOOPS_REVIEW_FAILED_TEMPLATE_ID,
|
||||
"email": email,
|
||||
"dataVariables": {
|
||||
"pr_name": pr_name,
|
||||
"review_id": review_id,
|
||||
}
|
||||
});
|
||||
|
||||
let res = self
|
||||
.client
|
||||
.post("https://app.loops.so/api/v1/transactional")
|
||||
.bearer_auth(&self.api_key)
|
||||
.json(&payload)
|
||||
.send()
|
||||
.await;
|
||||
|
||||
match res {
|
||||
Ok(resp) if resp.status().is_success() => {
|
||||
tracing::debug!("Review failed email sent via Loops to {email}");
|
||||
}
|
||||
Ok(resp) => {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
tracing::warn!(status = %status, body = %body, "Loops send failed for review failed");
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!(error = ?err, "Loops request error for review failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
134
crates/remote/src/r2.rs
Normal file
134
crates/remote/src/r2.rs
Normal file
@@ -0,0 +1,134 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use aws_credential_types::Credentials;
|
||||
use aws_sdk_s3::{
|
||||
Client,
|
||||
config::{Builder as S3ConfigBuilder, IdentityCache},
|
||||
presigning::PresigningConfig,
|
||||
primitives::ByteStream,
|
||||
};
|
||||
use chrono::{DateTime, Utc};
|
||||
use secrecy::ExposeSecret;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::config::R2Config;
|
||||
|
||||
/// Well-known filename for the payload tarball stored in each review folder.
|
||||
pub const PAYLOAD_FILENAME: &str = "payload.tar.gz";
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct R2Service {
|
||||
client: Client,
|
||||
bucket: String,
|
||||
presign_expiry: Duration,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PresignedUpload {
|
||||
pub upload_url: String,
|
||||
pub object_key: String,
|
||||
/// Folder path in R2 (e.g., "reviews/{review_id}") - this is stored in the database.
|
||||
pub folder_path: String,
|
||||
pub expires_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum R2Error {
|
||||
#[error("presign config error: {0}")]
|
||||
PresignConfig(String),
|
||||
#[error("presign error: {0}")]
|
||||
Presign(String),
|
||||
#[error("upload error: {0}")]
|
||||
Upload(String),
|
||||
}
|
||||
|
||||
impl R2Service {
|
||||
pub fn new(config: &R2Config) -> Self {
|
||||
let credentials = Credentials::new(
|
||||
&config.access_key_id,
|
||||
config.secret_access_key.expose_secret(),
|
||||
None,
|
||||
None,
|
||||
"r2-static",
|
||||
);
|
||||
|
||||
let s3_config =
|
||||
S3ConfigBuilder::new()
|
||||
.region(aws_sdk_s3::config::Region::new("auto"))
|
||||
.endpoint_url(&config.endpoint)
|
||||
.credentials_provider(credentials)
|
||||
.force_path_style(true)
|
||||
.stalled_stream_protection(
|
||||
aws_sdk_s3::config::StalledStreamProtectionConfig::disabled(),
|
||||
)
|
||||
.identity_cache(IdentityCache::no_cache())
|
||||
.build();
|
||||
|
||||
let client = Client::from_conf(s3_config);
|
||||
|
||||
Self {
|
||||
client,
|
||||
bucket: config.bucket.clone(),
|
||||
presign_expiry: Duration::from_secs(config.presign_expiry_secs),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn create_presigned_upload(
|
||||
&self,
|
||||
review_id: Uuid,
|
||||
content_type: Option<&str>,
|
||||
) -> Result<PresignedUpload, R2Error> {
|
||||
let folder_path = format!("reviews/{review_id}");
|
||||
let object_key = format!("{folder_path}/{PAYLOAD_FILENAME}");
|
||||
|
||||
let presigning_config = PresigningConfig::builder()
|
||||
.expires_in(self.presign_expiry)
|
||||
.build()
|
||||
.map_err(|e| R2Error::PresignConfig(e.to_string()))?;
|
||||
|
||||
let mut request = self
|
||||
.client
|
||||
.put_object()
|
||||
.bucket(&self.bucket)
|
||||
.key(&object_key);
|
||||
|
||||
if let Some(ct) = content_type {
|
||||
request = request.content_type(ct);
|
||||
}
|
||||
|
||||
let presigned = request
|
||||
.presigned(presigning_config)
|
||||
.await
|
||||
.map_err(|e| R2Error::Presign(e.to_string()))?;
|
||||
|
||||
let expires_at = Utc::now()
|
||||
+ chrono::Duration::from_std(self.presign_expiry).unwrap_or(chrono::Duration::hours(1));
|
||||
|
||||
Ok(PresignedUpload {
|
||||
upload_url: presigned.uri().to_string(),
|
||||
object_key,
|
||||
folder_path,
|
||||
expires_at,
|
||||
})
|
||||
}
|
||||
|
||||
/// Upload bytes directly to R2 (for server-side uploads).
|
||||
///
|
||||
/// Returns the folder path (e.g., "reviews/{review_id}") to store in the database.
|
||||
pub async fn upload_bytes(&self, review_id: Uuid, data: Vec<u8>) -> Result<String, R2Error> {
|
||||
let folder_path = format!("reviews/{review_id}");
|
||||
let object_key = format!("{folder_path}/{PAYLOAD_FILENAME}");
|
||||
|
||||
self.client
|
||||
.put_object()
|
||||
.bucket(&self.bucket)
|
||||
.key(&object_key)
|
||||
.body(ByteStream::from(data))
|
||||
.content_type("application/gzip")
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| R2Error::Upload(e.to_string()))?;
|
||||
|
||||
Ok(folder_path)
|
||||
}
|
||||
}
|
||||
821
crates/remote/src/routes/github_app.rs
Normal file
821
crates/remote/src/routes/github_app.rs
Normal file
@@ -0,0 +1,821 @@
|
||||
use axum::{
|
||||
Json, Router,
|
||||
body::Bytes,
|
||||
extract::{Path, Query, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::{IntoResponse, Redirect, Response},
|
||||
routing::{delete, get, post},
|
||||
};
|
||||
use chrono::{Duration, Utc};
|
||||
use secrecy::ExposeSecret;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{error, info, warn};
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::error::ErrorResponse;
|
||||
use crate::{
|
||||
AppState,
|
||||
auth::RequestContext,
|
||||
db::{
|
||||
github_app::GitHubAppRepository2, identity_errors::IdentityError,
|
||||
organizations::OrganizationRepository,
|
||||
},
|
||||
github_app::{PrReviewParams, PrReviewService, verify_webhook_signature},
|
||||
};
|
||||
|
||||
// ========== Public Routes ==========
|
||||
|
||||
pub fn public_router() -> Router<AppState> {
|
||||
Router::new()
|
||||
.route("/github/webhook", post(handle_webhook))
|
||||
.route("/github/app/callback", get(handle_callback))
|
||||
}
|
||||
|
||||
// ========== Protected Routes ==========
|
||||
|
||||
pub fn protected_router() -> Router<AppState> {
|
||||
Router::new()
|
||||
.route(
|
||||
"/organizations/{org_id}/github-app/install-url",
|
||||
get(get_install_url),
|
||||
)
|
||||
.route("/organizations/{org_id}/github-app/status", get(get_status))
|
||||
.route("/organizations/{org_id}/github-app", delete(uninstall))
|
||||
.route("/debug/pr-review/trigger", post(trigger_pr_review))
|
||||
}
|
||||
|
||||
// ========== Types ==========
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct InstallUrlResponse {
|
||||
pub install_url: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct GitHubAppStatusResponse {
|
||||
pub installed: bool,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub installation: Option<InstallationDetails>,
|
||||
pub repositories: Vec<RepositoryDetails>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct InstallationDetails {
|
||||
pub id: String,
|
||||
pub github_installation_id: i64,
|
||||
pub github_account_login: String,
|
||||
pub github_account_type: String,
|
||||
pub repository_selection: String,
|
||||
pub suspended_at: Option<String>,
|
||||
pub created_at: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct RepositoryDetails {
|
||||
pub id: String,
|
||||
pub github_repo_id: i64,
|
||||
pub repo_full_name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct CallbackQuery {
|
||||
pub installation_id: Option<i64>,
|
||||
pub state: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct TriggerPrReviewRequest {
|
||||
/// GitHub PR URL, e.g., "https://github.com/owner/repo/pull/123"
|
||||
pub pr_url: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct TriggerPrReviewResponse {
|
||||
pub review_id: Uuid,
|
||||
}
|
||||
|
||||
// ========== Protected Route Handlers ==========
|
||||
|
||||
/// GET /v1/organizations/:org_id/github-app/install-url
|
||||
/// Returns URL to install the GitHub App for this organization
|
||||
pub async fn get_install_url(
|
||||
State(state): State<AppState>,
|
||||
axum::extract::Extension(ctx): axum::extract::Extension<RequestContext>,
|
||||
Path(org_id): Path<Uuid>,
|
||||
) -> Result<impl IntoResponse, ErrorResponse> {
|
||||
// Check GitHub App is configured
|
||||
let github_app = state.github_app().ok_or_else(|| {
|
||||
ErrorResponse::new(StatusCode::NOT_IMPLEMENTED, "GitHub App not configured")
|
||||
})?;
|
||||
|
||||
// Check user is admin of organization
|
||||
let org_repo = OrganizationRepository::new(state.pool());
|
||||
org_repo
|
||||
.assert_admin(org_id, ctx.user.id)
|
||||
.await
|
||||
.map_err(|e| match e {
|
||||
IdentityError::PermissionDenied => {
|
||||
ErrorResponse::new(StatusCode::FORBIDDEN, "Admin access required")
|
||||
}
|
||||
IdentityError::NotFound => {
|
||||
ErrorResponse::new(StatusCode::NOT_FOUND, "Organization not found")
|
||||
}
|
||||
_ => ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"),
|
||||
})?;
|
||||
|
||||
// Check not a personal org
|
||||
let is_personal = org_repo
|
||||
.is_personal(org_id)
|
||||
.await
|
||||
.map_err(|_| ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"))?;
|
||||
|
||||
if is_personal {
|
||||
return Err(ErrorResponse::new(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"GitHub App cannot be installed on personal organizations",
|
||||
));
|
||||
}
|
||||
|
||||
// Generate state token (simple format: org_id:user_id:timestamp)
|
||||
// In production, you'd want to sign this with HMAC
|
||||
let expires_at = Utc::now() + Duration::minutes(10);
|
||||
let state_token = format!("{}:{}:{}", org_id, ctx.user.id, expires_at.timestamp());
|
||||
|
||||
// Store pending installation
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
gh_repo
|
||||
.create_pending(org_id, ctx.user.id, &state_token, expires_at)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
error!(?e, "Failed to create pending installation");
|
||||
ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error")
|
||||
})?;
|
||||
|
||||
// Build installation URL
|
||||
let install_url = format!(
|
||||
"https://github.com/apps/{}/installations/new?state={}",
|
||||
github_app.app_slug(),
|
||||
urlencoding::encode(&state_token)
|
||||
);
|
||||
|
||||
Ok(Json(InstallUrlResponse { install_url }))
|
||||
}
|
||||
|
||||
/// GET /v1/organizations/:org_id/github-app/status
|
||||
/// Returns the GitHub App installation status for this organization
|
||||
pub async fn get_status(
|
||||
State(state): State<AppState>,
|
||||
axum::extract::Extension(ctx): axum::extract::Extension<RequestContext>,
|
||||
Path(org_id): Path<Uuid>,
|
||||
) -> Result<impl IntoResponse, ErrorResponse> {
|
||||
// Check user is member of organization
|
||||
let org_repo = OrganizationRepository::new(state.pool());
|
||||
org_repo
|
||||
.assert_membership(org_id, ctx.user.id)
|
||||
.await
|
||||
.map_err(|e| match e {
|
||||
IdentityError::PermissionDenied | IdentityError::NotFound => {
|
||||
ErrorResponse::new(StatusCode::FORBIDDEN, "Access denied")
|
||||
}
|
||||
_ => ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"),
|
||||
})?;
|
||||
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
|
||||
let installation = gh_repo.get_by_organization(org_id).await.map_err(|e| {
|
||||
error!(?e, "Failed to get GitHub App installation");
|
||||
ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error")
|
||||
})?;
|
||||
|
||||
match installation {
|
||||
Some(inst) => {
|
||||
let repositories = gh_repo.get_repositories(inst.id).await.map_err(|e| {
|
||||
error!(?e, "Failed to get repositories");
|
||||
ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error")
|
||||
})?;
|
||||
|
||||
Ok(Json(GitHubAppStatusResponse {
|
||||
installed: true,
|
||||
installation: Some(InstallationDetails {
|
||||
id: inst.id.to_string(),
|
||||
github_installation_id: inst.github_installation_id,
|
||||
github_account_login: inst.github_account_login,
|
||||
github_account_type: inst.github_account_type,
|
||||
repository_selection: inst.repository_selection,
|
||||
suspended_at: inst.suspended_at.map(|t| t.to_rfc3339()),
|
||||
created_at: inst.created_at.to_rfc3339(),
|
||||
}),
|
||||
repositories: repositories
|
||||
.into_iter()
|
||||
.map(|r| RepositoryDetails {
|
||||
id: r.id.to_string(),
|
||||
github_repo_id: r.github_repo_id,
|
||||
repo_full_name: r.repo_full_name,
|
||||
})
|
||||
.collect(),
|
||||
}))
|
||||
}
|
||||
None => Ok(Json(GitHubAppStatusResponse {
|
||||
installed: false,
|
||||
installation: None,
|
||||
repositories: vec![],
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
/// DELETE /v1/organizations/:org_id/github-app
|
||||
/// Removes the local installation record (does not uninstall from GitHub)
|
||||
pub async fn uninstall(
|
||||
State(state): State<AppState>,
|
||||
axum::extract::Extension(ctx): axum::extract::Extension<RequestContext>,
|
||||
Path(org_id): Path<Uuid>,
|
||||
) -> Result<impl IntoResponse, ErrorResponse> {
|
||||
// Check user is admin of organization
|
||||
let org_repo = OrganizationRepository::new(state.pool());
|
||||
org_repo
|
||||
.assert_admin(org_id, ctx.user.id)
|
||||
.await
|
||||
.map_err(|e| match e {
|
||||
IdentityError::PermissionDenied => {
|
||||
ErrorResponse::new(StatusCode::FORBIDDEN, "Admin access required")
|
||||
}
|
||||
IdentityError::NotFound => {
|
||||
ErrorResponse::new(StatusCode::NOT_FOUND, "Organization not found")
|
||||
}
|
||||
_ => ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error"),
|
||||
})?;
|
||||
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
gh_repo.delete_by_organization(org_id).await.map_err(|e| {
|
||||
error!(?e, "Failed to delete GitHub App installation");
|
||||
ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, "Database error")
|
||||
})?;
|
||||
|
||||
info!(org_id = %org_id, user_id = %ctx.user.id, "GitHub App installation removed");
|
||||
Ok(StatusCode::NO_CONTENT)
|
||||
}
|
||||
|
||||
// ========== Public Route Handlers ==========
|
||||
|
||||
/// GET /v1/github/app/callback
|
||||
/// Handles redirect from GitHub after app installation
|
||||
pub async fn handle_callback(
|
||||
State(state): State<AppState>,
|
||||
Query(query): Query<CallbackQuery>,
|
||||
) -> Response {
|
||||
let frontend_base = state
|
||||
.config
|
||||
.server_public_base_url
|
||||
.clone()
|
||||
.unwrap_or_else(|| "http://localhost:3000".to_string());
|
||||
|
||||
// Helper to redirect with error
|
||||
let redirect_error = |org_id: Option<Uuid>, error: &str| -> Response {
|
||||
let url = match org_id {
|
||||
Some(id) => format!(
|
||||
"{}/account/organizations/{}?github_app_error={}",
|
||||
frontend_base,
|
||||
id,
|
||||
urlencoding::encode(error)
|
||||
),
|
||||
None => format!(
|
||||
"{}/account?github_app_error={}",
|
||||
frontend_base,
|
||||
urlencoding::encode(error)
|
||||
),
|
||||
};
|
||||
Redirect::temporary(&url).into_response()
|
||||
};
|
||||
|
||||
// Check GitHub App is configured
|
||||
let Some(github_app) = state.github_app() else {
|
||||
return redirect_error(None, "GitHub App not configured");
|
||||
};
|
||||
|
||||
// Validate required params
|
||||
let Some(installation_id) = query.installation_id else {
|
||||
return redirect_error(None, "Missing installation_id");
|
||||
};
|
||||
|
||||
let Some(state_token) = query.state else {
|
||||
return redirect_error(None, "Missing state parameter");
|
||||
};
|
||||
|
||||
// Parse state token: org_id:user_id:timestamp
|
||||
let parts: Vec<&str> = state_token.split(':').collect();
|
||||
if parts.len() != 3 {
|
||||
return redirect_error(None, "Invalid state token format");
|
||||
}
|
||||
|
||||
let Ok(org_id) = Uuid::parse_str(parts[0]) else {
|
||||
return redirect_error(None, "Invalid organization ID in state");
|
||||
};
|
||||
|
||||
let Ok(user_id) = Uuid::parse_str(parts[1]) else {
|
||||
return redirect_error(Some(org_id), "Invalid user ID in state");
|
||||
};
|
||||
|
||||
let Ok(timestamp) = parts[2].parse::<i64>() else {
|
||||
return redirect_error(Some(org_id), "Invalid timestamp in state");
|
||||
};
|
||||
|
||||
// Check expiry
|
||||
if Utc::now().timestamp() > timestamp {
|
||||
return redirect_error(Some(org_id), "Installation link expired");
|
||||
}
|
||||
|
||||
// Verify pending installation exists
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
let pending = match gh_repo.get_pending_by_state(&state_token).await {
|
||||
Ok(Some(p)) => p,
|
||||
Ok(None) => {
|
||||
return redirect_error(Some(org_id), "Installation not found or expired");
|
||||
}
|
||||
Err(e) => {
|
||||
error!(?e, "Failed to get pending installation");
|
||||
return redirect_error(Some(org_id), "Database error");
|
||||
}
|
||||
};
|
||||
|
||||
// Fetch installation details from GitHub
|
||||
let installation_info = match github_app.get_installation(installation_id).await {
|
||||
Ok(info) => info,
|
||||
Err(e) => {
|
||||
error!(?e, "Failed to get installation from GitHub");
|
||||
return redirect_error(Some(org_id), "Failed to verify installation with GitHub");
|
||||
}
|
||||
};
|
||||
|
||||
// Create installation record
|
||||
if let Err(e) = gh_repo
|
||||
.create_installation(
|
||||
pending.organization_id,
|
||||
installation_id,
|
||||
&installation_info.account.login,
|
||||
&installation_info.account.account_type,
|
||||
&installation_info.repository_selection,
|
||||
user_id,
|
||||
)
|
||||
.await
|
||||
{
|
||||
error!(?e, "Failed to create installation record");
|
||||
return redirect_error(Some(org_id), "Failed to save installation");
|
||||
}
|
||||
|
||||
// Delete pending record
|
||||
if let Err(e) = gh_repo.delete_pending(&state_token).await {
|
||||
warn!(?e, "Failed to delete pending installation record");
|
||||
}
|
||||
|
||||
// Fetch and store repositories if selection is "selected"
|
||||
if installation_info.repository_selection == "selected"
|
||||
&& let Ok(repos) = github_app.list_installation_repos(installation_id).await
|
||||
{
|
||||
let installation = gh_repo
|
||||
.get_by_github_id(installation_id)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
if let Some(inst) = installation {
|
||||
let repo_data: Vec<(i64, String)> =
|
||||
repos.into_iter().map(|r| (r.id, r.full_name)).collect();
|
||||
if let Err(e) = gh_repo.sync_repositories(inst.id, &repo_data).await {
|
||||
warn!(?e, "Failed to sync repositories");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
org_id = %org_id,
|
||||
installation_id = installation_id,
|
||||
account = %installation_info.account.login,
|
||||
"GitHub App installed successfully"
|
||||
);
|
||||
|
||||
// Redirect to organization page with success
|
||||
let url = format!(
|
||||
"{}/account/organizations/{}?github_app=installed",
|
||||
frontend_base, org_id
|
||||
);
|
||||
Redirect::temporary(&url).into_response()
|
||||
}
|
||||
|
||||
/// POST /v1/github/webhook
|
||||
/// Handles webhook events from GitHub
|
||||
pub async fn handle_webhook(
|
||||
State(state): State<AppState>,
|
||||
headers: HeaderMap,
|
||||
body: Bytes,
|
||||
) -> Response {
|
||||
// Check GitHub App is configured
|
||||
let Some(github_app) = state.github_app() else {
|
||||
warn!("Received webhook but GitHub App not configured");
|
||||
return StatusCode::NOT_IMPLEMENTED.into_response();
|
||||
};
|
||||
|
||||
// Verify signature
|
||||
let signature = headers
|
||||
.get("X-Hub-Signature-256")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
if !verify_webhook_signature(
|
||||
github_app.webhook_secret().expose_secret().as_bytes(),
|
||||
signature,
|
||||
&body,
|
||||
) {
|
||||
warn!("Invalid webhook signature");
|
||||
return StatusCode::UNAUTHORIZED.into_response();
|
||||
}
|
||||
|
||||
// Get event type
|
||||
let event_type = headers
|
||||
.get("X-GitHub-Event")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("unknown");
|
||||
|
||||
info!(event_type, "Received GitHub webhook");
|
||||
|
||||
// Parse payload
|
||||
let payload: serde_json::Value = match serde_json::from_slice(&body) {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
warn!(?e, "Failed to parse webhook payload");
|
||||
return StatusCode::BAD_REQUEST.into_response();
|
||||
}
|
||||
};
|
||||
|
||||
// Handle different event types
|
||||
match event_type {
|
||||
"installation" => handle_installation_event(&state, &payload).await,
|
||||
"installation_repositories" => handle_installation_repos_event(&state, &payload).await,
|
||||
"pull_request" => handle_pull_request_event(&state, github_app, &payload).await,
|
||||
_ => {
|
||||
info!(event_type, "Ignoring unhandled webhook event");
|
||||
StatusCode::OK.into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ========== Webhook Event Handlers ==========
|
||||
|
||||
async fn handle_installation_event(state: &AppState, payload: &serde_json::Value) -> Response {
|
||||
let action = payload["action"].as_str().unwrap_or("");
|
||||
let installation_id = payload["installation"]["id"].as_i64().unwrap_or(0);
|
||||
|
||||
info!(action, installation_id, "Processing installation event");
|
||||
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
|
||||
match action {
|
||||
"deleted" => {
|
||||
if let Err(e) = gh_repo.delete_by_github_id(installation_id).await {
|
||||
error!(?e, "Failed to delete installation");
|
||||
} else {
|
||||
info!(installation_id, "Installation deleted");
|
||||
}
|
||||
}
|
||||
"suspend" => {
|
||||
if let Err(e) = gh_repo.suspend(installation_id).await {
|
||||
error!(?e, "Failed to suspend installation");
|
||||
} else {
|
||||
info!(installation_id, "Installation suspended");
|
||||
}
|
||||
}
|
||||
"unsuspend" => {
|
||||
if let Err(e) = gh_repo.unsuspend(installation_id).await {
|
||||
error!(?e, "Failed to unsuspend installation");
|
||||
} else {
|
||||
info!(installation_id, "Installation unsuspended");
|
||||
}
|
||||
}
|
||||
"created" => {
|
||||
// Installation created via webhook (without going through our flow)
|
||||
// This shouldn't happen if orphan installations are rejected
|
||||
info!(
|
||||
installation_id,
|
||||
"Installation created event received (orphan)"
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
info!(action, "Ignoring installation action");
|
||||
}
|
||||
}
|
||||
|
||||
StatusCode::OK.into_response()
|
||||
}
|
||||
|
||||
async fn handle_installation_repos_event(
|
||||
state: &AppState,
|
||||
payload: &serde_json::Value,
|
||||
) -> Response {
|
||||
let action = payload["action"].as_str().unwrap_or("");
|
||||
let installation_id = payload["installation"]["id"].as_i64().unwrap_or(0);
|
||||
|
||||
info!(
|
||||
action,
|
||||
installation_id, "Processing installation_repositories event"
|
||||
);
|
||||
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
|
||||
// Get our installation record
|
||||
let installation = match gh_repo.get_by_github_id(installation_id).await {
|
||||
Ok(Some(inst)) => inst,
|
||||
Ok(None) => {
|
||||
info!(installation_id, "Installation not found, ignoring");
|
||||
return StatusCode::OK.into_response();
|
||||
}
|
||||
Err(e) => {
|
||||
error!(?e, "Failed to get installation");
|
||||
return StatusCode::OK.into_response();
|
||||
}
|
||||
};
|
||||
|
||||
match action {
|
||||
"added" => {
|
||||
let repos: Vec<(i64, String)> = payload["repositories_added"]
|
||||
.as_array()
|
||||
.unwrap_or(&vec![])
|
||||
.iter()
|
||||
.filter_map(|r| {
|
||||
let id = r["id"].as_i64()?;
|
||||
let name = r["full_name"].as_str()?;
|
||||
Some((id, name.to_string()))
|
||||
})
|
||||
.collect();
|
||||
|
||||
if let Err(e) = gh_repo.add_repositories(installation.id, &repos).await {
|
||||
error!(?e, "Failed to add repositories");
|
||||
} else {
|
||||
info!(installation_id, count = repos.len(), "Repositories added");
|
||||
}
|
||||
}
|
||||
"removed" => {
|
||||
let repo_ids: Vec<i64> = payload["repositories_removed"]
|
||||
.as_array()
|
||||
.unwrap_or(&vec![])
|
||||
.iter()
|
||||
.filter_map(|r| r["id"].as_i64())
|
||||
.collect();
|
||||
|
||||
if let Err(e) = gh_repo
|
||||
.remove_repositories(installation.id, &repo_ids)
|
||||
.await
|
||||
{
|
||||
error!(?e, "Failed to remove repositories");
|
||||
} else {
|
||||
info!(
|
||||
installation_id,
|
||||
count = repo_ids.len(),
|
||||
"Repositories removed"
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
info!(action, "Ignoring repositories action");
|
||||
}
|
||||
}
|
||||
|
||||
// Update repository selection if changed
|
||||
let new_selection = payload["repository_selection"].as_str().unwrap_or("");
|
||||
if !new_selection.is_empty()
|
||||
&& new_selection != installation.repository_selection
|
||||
&& let Err(e) = gh_repo
|
||||
.update_repository_selection(installation_id, new_selection)
|
||||
.await
|
||||
{
|
||||
error!(?e, "Failed to update repository selection");
|
||||
}
|
||||
|
||||
StatusCode::OK.into_response()
|
||||
}
|
||||
|
||||
async fn handle_pull_request_event(
|
||||
state: &AppState,
|
||||
github_app: &crate::github_app::GitHubAppService,
|
||||
payload: &serde_json::Value,
|
||||
) -> Response {
|
||||
use crate::github_app::{PrReviewParams, PrReviewService};
|
||||
|
||||
let action = payload["action"].as_str().unwrap_or("");
|
||||
|
||||
// Only handle opened PRs
|
||||
if action != "opened" {
|
||||
return StatusCode::OK.into_response();
|
||||
}
|
||||
|
||||
let installation_id = payload["installation"]["id"].as_i64().unwrap_or(0);
|
||||
let pr_number = payload["pull_request"]["number"].as_u64().unwrap_or(0);
|
||||
let repo_owner = payload["repository"]["owner"]["login"]
|
||||
.as_str()
|
||||
.unwrap_or("");
|
||||
let repo_name = payload["repository"]["name"].as_str().unwrap_or("");
|
||||
|
||||
info!(
|
||||
installation_id,
|
||||
pr_number, repo_owner, repo_name, "Processing pull_request.opened event"
|
||||
);
|
||||
|
||||
// Check if we have this installation
|
||||
let gh_repo = GitHubAppRepository2::new(state.pool());
|
||||
let installation = match gh_repo.get_by_github_id(installation_id).await {
|
||||
Ok(Some(inst)) => inst,
|
||||
Ok(None) => {
|
||||
info!(installation_id, "Installation not found, ignoring PR");
|
||||
return StatusCode::OK.into_response();
|
||||
}
|
||||
Err(e) => {
|
||||
error!(?e, "Failed to get installation");
|
||||
return StatusCode::OK.into_response();
|
||||
}
|
||||
};
|
||||
|
||||
// Check if installation is suspended
|
||||
if installation.suspended_at.is_some() {
|
||||
info!(installation_id, "Installation is suspended, ignoring PR");
|
||||
return StatusCode::OK.into_response();
|
||||
}
|
||||
|
||||
// Check if R2 and review worker are configured
|
||||
let Some(r2) = state.r2() else {
|
||||
info!("R2 not configured, skipping PR review");
|
||||
return StatusCode::OK.into_response();
|
||||
};
|
||||
|
||||
let Some(worker_base_url) = state.config.review_worker_base_url.as_ref() else {
|
||||
info!("Review worker not configured, skipping PR review");
|
||||
return StatusCode::OK.into_response();
|
||||
};
|
||||
|
||||
// Extract PR metadata from payload
|
||||
let pr_title = payload["pull_request"]["title"]
|
||||
.as_str()
|
||||
.unwrap_or("Untitled PR")
|
||||
.to_string();
|
||||
let pr_body = payload["pull_request"]["body"]
|
||||
.as_str()
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let head_sha = payload["pull_request"]["head"]["sha"]
|
||||
.as_str()
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let base_sha = payload["pull_request"]["base"]["sha"]
|
||||
.as_str()
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
// Spawn async task to process PR review
|
||||
let github_app_clone = github_app.clone();
|
||||
let r2_clone = r2.clone();
|
||||
let http_client = state.http_client.clone();
|
||||
let worker_url = worker_base_url.clone();
|
||||
let server_url = state.server_public_base_url.clone();
|
||||
let pool = state.pool.clone();
|
||||
let repo_owner = repo_owner.to_string();
|
||||
let repo_name = repo_name.to_string();
|
||||
|
||||
tokio::spawn(async move {
|
||||
let service = PrReviewService::new(
|
||||
github_app_clone,
|
||||
r2_clone,
|
||||
http_client,
|
||||
worker_url,
|
||||
server_url,
|
||||
);
|
||||
|
||||
let params = PrReviewParams {
|
||||
installation_id,
|
||||
owner: repo_owner.clone(),
|
||||
repo: repo_name.clone(),
|
||||
pr_number,
|
||||
pr_title,
|
||||
pr_body,
|
||||
head_sha,
|
||||
base_sha,
|
||||
};
|
||||
|
||||
if let Err(e) = service.process_pr_review(&pool, params).await {
|
||||
error!(
|
||||
?e,
|
||||
installation_id, pr_number, repo_owner, repo_name, "Failed to start PR review"
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
StatusCode::OK.into_response()
|
||||
}
|
||||
|
||||
// ========== Debug Endpoint ==========
|
||||
|
||||
/// Parse a GitHub PR URL of the form
/// `https://github.com/{owner}/{repo}/pull/{number}` into its
/// `(owner, repo, pr_number)` components. Returns `None` for any other shape.
fn parse_pr_url(url: &str) -> Option<(String, String, u64)> {
    // Drop a trailing '/' so "…/pull/123/" parses the same as "…/pull/123",
    // then split into path segments.
    let trimmed = url.trim_end_matches('/');
    let segments: Vec<&str> = trimmed.split('/').collect();

    // Locate the "github.com" host segment; everything we need follows it.
    let host_idx = segments.iter().position(|&s| s == "github.com")?;

    // Need exactly owner / repo / "pull" / number after the host; `get`
    // returns None when the slice would run past the end.
    let tail = segments.get(host_idx + 1..host_idx + 5)?;

    let owner = tail[0].to_string();
    let repo = tail[1].to_string();

    if tail[2] != "pull" {
        return None;
    }

    let pr_number: u64 = tail[3].parse().ok()?;

    Some((owner, repo, pr_number))
}
|
||||
|
||||
/// POST /v1/debug/pr-review/trigger
/// Manually trigger a PR review for debugging purposes.
///
/// Resolves the GitHub App installation from the PR URL's owner, fetches the
/// PR's metadata from GitHub, and runs the same `PrReviewService` pipeline as
/// the webhook path — but synchronously, returning the new review's ID.
pub async fn trigger_pr_review(
    State(state): State<AppState>,
    Json(payload): Json<TriggerPrReviewRequest>,
) -> Result<Json<TriggerPrReviewResponse>, ErrorResponse> {
    // 1. Parse PR URL into (owner, repo, number); reject anything malformed.
    let (owner, repo, pr_number) = parse_pr_url(&payload.pr_url)
        .ok_or_else(|| ErrorResponse::new(StatusCode::BAD_REQUEST, "Invalid PR URL format"))?;

    // 2. Validate services are configured (all three are optional at startup).
    let github_app = state.github_app().ok_or_else(|| {
        ErrorResponse::new(StatusCode::SERVICE_UNAVAILABLE, "GitHub App not configured")
    })?;
    let r2 = state
        .r2()
        .ok_or_else(|| ErrorResponse::new(StatusCode::SERVICE_UNAVAILABLE, "R2 not configured"))?;
    let worker_base_url = state
        .config
        .review_worker_base_url
        .as_ref()
        .ok_or_else(|| {
            ErrorResponse::new(
                StatusCode::SERVICE_UNAVAILABLE,
                "Review worker not configured",
            )
        })?;

    // 3. Look up installation by owner — the app must be installed on the
    //    account that owns the repository.
    let gh_repo = GitHubAppRepository2::new(state.pool());
    let installation = gh_repo
        .get_by_account_login(&owner)
        .await
        .map_err(|e| ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
        .ok_or_else(|| {
            ErrorResponse::new(
                StatusCode::NOT_FOUND,
                format!("No installation found for {}", owner),
            )
        })?;

    // 4. Fetch PR details (title/body, head/base SHAs) from the GitHub API.
    let pr_details = github_app
        .get_pr_details(
            installation.github_installation_id,
            &owner,
            &repo,
            pr_number,
        )
        .await
        .map_err(|e| ErrorResponse::new(StatusCode::BAD_GATEWAY, e.to_string()))?;

    // 5. Create service and process review synchronously (unlike the webhook
    //    path, which spawns a background task).
    let service = PrReviewService::new(
        github_app.clone(),
        r2.clone(),
        state.http_client.clone(),
        worker_base_url.clone(),
        state.server_public_base_url.clone(),
    );

    let params = PrReviewParams {
        installation_id: installation.github_installation_id,
        owner,
        repo,
        pr_number,
        pr_title: pr_details.title,
        pr_body: pr_details.body.unwrap_or_default(),
        head_sha: pr_details.head.sha,
        base_sha: pr_details.base.sha,
    };

    let review_id = service
        .process_pr_review(state.pool(), params)
        .await
        .map_err(|e| ErrorResponse::new(StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;

    info!(
        review_id = %review_id,
        pr_url = %payload.pr_url,
        "Manual PR review triggered"
    );

    Ok(Json(TriggerPrReviewResponse { review_id }))
}
|
||||
@@ -16,11 +16,13 @@ use crate::{AppState, auth::require_session};
|
||||
|
||||
mod electric_proxy;
|
||||
mod error;
|
||||
mod github_app;
|
||||
mod identity;
|
||||
mod oauth;
|
||||
pub(crate) mod organization_members;
|
||||
mod organizations;
|
||||
mod projects;
|
||||
mod review;
|
||||
pub mod tasks;
|
||||
mod tokens;
|
||||
|
||||
@@ -49,7 +51,9 @@ pub fn router(state: AppState) -> Router {
|
||||
.route("/health", get(health))
|
||||
.merge(oauth::public_router())
|
||||
.merge(organization_members::public_router())
|
||||
.merge(tokens::public_router());
|
||||
.merge(tokens::public_router())
|
||||
.merge(review::public_router())
|
||||
.merge(github_app::public_router());
|
||||
|
||||
let v1_protected = Router::<AppState>::new()
|
||||
.merge(identity::router())
|
||||
@@ -59,6 +63,7 @@ pub fn router(state: AppState) -> Router {
|
||||
.merge(organization_members::protected_router())
|
||||
.merge(oauth::protected_router())
|
||||
.merge(electric_proxy::router())
|
||||
.merge(github_app::protected_router())
|
||||
.layer(middleware::from_fn_with_state(
|
||||
state.clone(),
|
||||
require_session,
|
||||
|
||||
475
crates/remote/src/routes/review.rs
Normal file
475
crates/remote/src/routes/review.rs
Normal file
@@ -0,0 +1,475 @@
|
||||
use std::net::IpAddr;
|
||||
|
||||
use axum::{
|
||||
Json, Router,
|
||||
body::Body,
|
||||
extract::{Path, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
response::{IntoResponse, Response},
|
||||
routing::{get, post},
|
||||
};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
AppState,
|
||||
db::reviews::{CreateReviewParams, ReviewRepository},
|
||||
r2::R2Error,
|
||||
};
|
||||
|
||||
/// Unauthenticated review endpoints.
///
/// These are public because they are called by the CLI (init/start/status)
/// and by the review worker itself (success/failed callbacks); access to a
/// review is gated only by knowledge of its UUID.
pub fn public_router() -> Router<AppState> {
    Router::new()
        // CLI: create a review record and get a presigned upload URL
        .route("/review/init", post(init_review_upload))
        // CLI: kick off processing on the worker
        .route("/review/start", post(start_review))
        // Polled by the CLI while the worker runs
        .route("/review/{id}/status", get(get_review_status))
        // Review result and supporting data, mostly proxied from the worker
        .route("/review/{id}", get(get_review))
        .route("/review/{id}/metadata", get(get_review_metadata))
        .route("/review/{id}/file/{file_hash}", get(get_review_file))
        .route("/review/{id}/diff", get(get_review_diff))
        // Worker callbacks on completion/failure
        .route("/review/{id}/success", post(review_success))
        .route("/review/{id}/failed", post(review_failed))
}
|
||||
|
||||
/// Body of POST /review/init, sent by the CLI.
#[derive(Debug, Deserialize)]
pub struct InitReviewRequest {
    /// GitHub PR URL the review is for.
    pub gh_pr_url: String,
    /// Address to notify when the review completes or fails.
    pub email: String,
    pub pr_title: String,
    /// Optional Claude Code session to associate with the review.
    #[serde(default)]
    pub claude_code_session_id: Option<String>,
    /// Content type for the tarball upload; passed through to the presigned
    /// URL request when set.
    #[serde(default)]
    pub content_type: Option<String>,
}

/// Response of POST /review/init: where, and until when, to upload.
#[derive(Debug, Serialize)]
pub struct InitReviewResponse {
    pub review_id: Uuid,
    /// Presigned R2 PUT URL for the codebase tarball.
    pub upload_url: String,
    pub object_key: String,
    /// Expiry of the presigned URL.
    pub expires_at: DateTime<Utc>,
}

/// Response of GET /review/:id/metadata.
#[derive(Debug, Serialize)]
pub struct ReviewMetadataResponse {
    pub gh_pr_url: String,
    pub pr_title: String,
}
|
||||
|
||||
/// Errors surfaced by the public review endpoints.
///
/// Mapped to HTTP responses by the `IntoResponse` impl below, which pairs each
/// variant with a status code and a JSON `{"error": ...}` body.
#[derive(Debug, thiserror::Error)]
pub enum ReviewError {
    /// R2 object storage was not configured at startup.
    #[error("R2 storage not configured")]
    NotConfigured,
    #[error("failed to generate upload URL: {0}")]
    R2Error(#[from] R2Error),
    /// Per-IP review quota exceeded (see `check_rate_limit`).
    #[error("rate limit exceeded")]
    RateLimited,
    #[error("unable to determine client IP")]
    MissingClientIp,
    #[error("database error: {0}")]
    Database(#[from] crate::db::reviews::ReviewError),
    /// `review_worker_base_url` was not configured at startup.
    #[error("review worker not configured")]
    WorkerNotConfigured,
    #[error("review worker request failed: {0}")]
    WorkerError(#[from] reqwest::Error),
    /// Path parameter was not a valid UUID.
    #[error("invalid review ID")]
    InvalidReviewId,
}
|
||||
|
||||
impl IntoResponse for ReviewError {
    /// Map each error variant to an HTTP status plus a JSON `{"error": ...}`
    /// body. Internal failure details (R2, database, worker) are logged here
    /// and replaced with generic messages so they are not leaked to clients.
    fn into_response(self) -> Response {
        let (status, message) = match &self {
            ReviewError::NotConfigured => (
                StatusCode::SERVICE_UNAVAILABLE,
                "Review upload service not available",
            ),
            ReviewError::R2Error(e) => {
                // Log the real cause; the client only sees a generic 500.
                tracing::error!(error = %e, "R2 presign failed");
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "Failed to generate upload URL",
                )
            }
            ReviewError::RateLimited => (
                StatusCode::TOO_MANY_REQUESTS,
                "Rate limit exceeded. Try again later.",
            ),
            ReviewError::MissingClientIp => {
                (StatusCode::BAD_REQUEST, "Unable to determine client IP")
            }
            // NotFound is the one database error that is the client's problem.
            ReviewError::Database(crate::db::reviews::ReviewError::NotFound) => {
                (StatusCode::NOT_FOUND, "Review not found")
            }
            ReviewError::Database(e) => {
                tracing::error!(error = %e, "Database error in review");
                (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error")
            }
            ReviewError::WorkerNotConfigured => (
                StatusCode::SERVICE_UNAVAILABLE,
                "Review worker service not available",
            ),
            ReviewError::WorkerError(e) => {
                tracing::error!(error = %e, "Review worker request failed");
                (
                    StatusCode::BAD_GATEWAY,
                    "Failed to fetch review from worker",
                )
            }
            ReviewError::InvalidReviewId => (StatusCode::BAD_REQUEST, "Invalid review ID"),
        };

        let body = serde_json::json!({
            "error": message
        });

        (status, Json(body)).into_response()
    }
}
|
||||
|
||||
/// Extract client IP from headers, with fallbacks for local development
|
||||
fn extract_client_ip(headers: &HeaderMap) -> Option<IpAddr> {
|
||||
// Try Cloudflare header first (production)
|
||||
if let Some(ip) = headers
|
||||
.get("CF-Connecting-IP")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|s| s.parse().ok())
|
||||
{
|
||||
return Some(ip);
|
||||
}
|
||||
|
||||
// Fallback to X-Forwarded-For (common proxy header)
|
||||
if let Some(ip) = headers
|
||||
.get("X-Forwarded-For")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|s| s.split(',').next()) // Take first IP in chain
|
||||
.and_then(|s| s.trim().parse().ok())
|
||||
{
|
||||
return Some(ip);
|
||||
}
|
||||
|
||||
// Fallback to X-Real-IP
|
||||
if let Some(ip) = headers
|
||||
.get("X-Real-IP")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|s| s.parse().ok())
|
||||
{
|
||||
return Some(ip);
|
||||
}
|
||||
|
||||
// For local development, use localhost
|
||||
Some(IpAddr::V4(std::net::Ipv4Addr::LOCALHOST))
|
||||
}
|
||||
|
||||
/// Check rate limits for the given IP address.
|
||||
/// Limits: 2 reviews per minute, 20 reviews per hour.
|
||||
async fn check_rate_limit(repo: &ReviewRepository<'_>, ip: IpAddr) -> Result<(), ReviewError> {
|
||||
let now = Utc::now();
|
||||
|
||||
// Check minute limit (2 per minute)
|
||||
let minute_ago = now - Duration::minutes(1);
|
||||
let minute_count = repo.count_since(ip, minute_ago).await?;
|
||||
if minute_count >= 2 {
|
||||
return Err(ReviewError::RateLimited);
|
||||
}
|
||||
|
||||
// Check hour limit (20 per hour)
|
||||
let hour_ago = now - Duration::hours(1);
|
||||
let hour_count = repo.count_since(ip, hour_ago).await?;
|
||||
if hour_count >= 20 {
|
||||
return Err(ReviewError::RateLimited);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// POST /review/init
///
/// Creates a review record and hands the CLI a presigned R2 URL to upload the
/// codebase tarball to. Rate-limited per client IP (2/minute, 20/hour).
pub async fn init_review_upload(
    State(state): State<AppState>,
    headers: HeaderMap,
    Json(payload): Json<InitReviewRequest>,
) -> Result<Json<InitReviewResponse>, ReviewError> {
    // 1. Generate the review ID upfront (used in both R2 path and DB record)
    let review_id = Uuid::new_v4();

    // 2. Extract IP (required for rate limiting)
    let ip = extract_client_ip(&headers).ok_or(ReviewError::MissingClientIp)?;

    // 3. Check rate limits before doing any R2 or insert work
    let repo = ReviewRepository::new(state.pool());
    check_rate_limit(&repo, ip).await?;

    // 4. Get R2 service
    let r2 = state.r2().ok_or(ReviewError::NotConfigured)?;

    // 5. Generate presigned URL with review ID in path
    let content_type = payload.content_type.as_deref();
    let upload = r2.create_presigned_upload(review_id, content_type).await?;

    // 6. Insert DB record with the same review ID, storing folder path
    let review = repo
        .create(CreateReviewParams {
            id: review_id,
            gh_pr_url: &payload.gh_pr_url,
            claude_code_session_id: payload.claude_code_session_id.as_deref(),
            ip_address: ip,
            r2_path: &upload.folder_path,
            email: &payload.email,
            pr_title: &payload.pr_title,
        })
        .await?;

    // 7. Return response with review_id
    Ok(Json(InitReviewResponse {
        review_id: review.id,
        upload_url: upload.upload_url,
        object_key: upload.object_key,
        expires_at: upload.expires_at,
    }))
}
|
||||
|
||||
/// Proxy a GET request to the review worker and return its response.
///
/// Only the status code, body, and `content-type` header are forwarded; all
/// other worker response headers are dropped.
async fn proxy_to_worker(state: &AppState, path: &str) -> Result<Response, ReviewError> {
    let base_url = state
        .config
        .review_worker_base_url
        .as_ref()
        .ok_or(ReviewError::WorkerNotConfigured)?;

    // `path` always starts with '/', so trim a trailing '/' off the base to
    // avoid producing a double slash.
    let url = format!("{}{}", base_url.trim_end_matches('/'), path);

    let response = state.http_client.get(&url).send().await?;

    let status = response.status();
    let headers = response.headers().clone();
    let bytes = response.bytes().await?;

    let mut builder = Response::builder().status(status);

    // Copy relevant headers from the worker response
    if let Some(content_type) = headers.get("content-type") {
        builder = builder.header("content-type", content_type);
    }

    // Status and header came from a successfully parsed response, so the
    // builder cannot fail here.
    Ok(builder.body(Body::from(bytes)).unwrap())
}
|
||||
|
||||
/// Proxy a POST request with a JSON body to the review worker.
///
/// Mirrors `proxy_to_worker`: forwards status, body, and `content-type` only.
async fn proxy_post_to_worker(
    state: &AppState,
    path: &str,
    body: serde_json::Value,
) -> Result<Response, ReviewError> {
    let base_url = state
        .config
        .review_worker_base_url
        .as_ref()
        .ok_or(ReviewError::WorkerNotConfigured)?;

    // `path` always starts with '/'; avoid a double slash after the base.
    let url = format!("{}{}", base_url.trim_end_matches('/'), path);

    let response = state.http_client.post(&url).json(&body).send().await?;

    let status = response.status();
    let headers = response.headers().clone();
    let bytes = response.bytes().await?;

    let mut builder = Response::builder().status(status);

    if let Some(content_type) = headers.get("content-type") {
        builder = builder.header("content-type", content_type);
    }

    // Status and header came from a successfully parsed response, so the
    // builder cannot fail here.
    Ok(builder.body(Body::from(bytes)).unwrap())
}
|
||||
|
||||
/// POST /review/start - Start review processing on worker.
///
/// Forwards the JSON body untouched. NOTE(review): unlike the GET endpoints
/// below, this does not verify that the referenced review exists in our
/// database — confirm the worker validates the id it receives.
pub async fn start_review(
    State(state): State<AppState>,
    Json(body): Json<serde_json::Value>,
) -> Result<Response, ReviewError> {
    proxy_post_to_worker(&state, "/review/start", body).await
}
|
||||
|
||||
/// GET /review/:id/status - Get review status from worker.
///
/// Validates the id and confirms the review exists locally (yields 404 via
/// `ReviewError::Database(NotFound)` otherwise) before proxying to the worker.
pub async fn get_review_status(
    State(state): State<AppState>,
    Path(id): Path<String>,
) -> Result<Response, ReviewError> {
    let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;

    // Verify review exists in our database
    let repo = ReviewRepository::new(state.pool());
    let _review = repo.get_by_id(review_id).await?;

    // Proxy to worker
    proxy_to_worker(&state, &format!("/review/{}/status", review_id)).await
}
|
||||
|
||||
/// GET /review/:id/metadata - Get PR metadata from the local database.
///
/// Served entirely from our records; this endpoint never hits the worker.
pub async fn get_review_metadata(
    State(state): State<AppState>,
    Path(id): Path<String>,
) -> Result<Json<ReviewMetadataResponse>, ReviewError> {
    let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;

    let repo = ReviewRepository::new(state.pool());
    let review = repo.get_by_id(review_id).await?;

    Ok(Json(ReviewMetadataResponse {
        gh_pr_url: review.gh_pr_url,
        pr_title: review.pr_title,
    }))
}
|
||||
|
||||
/// GET /review/:id - Get the complete review result from the worker.
///
/// Validates the id and confirms the review exists locally before proxying.
pub async fn get_review(
    State(state): State<AppState>,
    Path(id): Path<String>,
) -> Result<Response, ReviewError> {
    let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;

    // Verify review exists in our database
    let repo = ReviewRepository::new(state.pool());
    let _review = repo.get_by_id(review_id).await?;

    // Proxy to worker
    proxy_to_worker(&state, &format!("/review/{}", review_id)).await
}
|
||||
|
||||
/// GET /review/:id/file/:file_hash - Get file content from the worker.
///
/// Validates the id and confirms the review exists locally before proxying.
/// The `file_hash` segment is passed through to the worker unvalidated.
pub async fn get_review_file(
    State(state): State<AppState>,
    Path((id, file_hash)): Path<(String, String)>,
) -> Result<Response, ReviewError> {
    let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;

    // Verify review exists in our database
    let repo = ReviewRepository::new(state.pool());
    let _review = repo.get_by_id(review_id).await?;

    // Proxy to worker
    proxy_to_worker(&state, &format!("/review/{}/file/{}", review_id, file_hash)).await
}
|
||||
|
||||
/// GET /review/:id/diff - Get the diff for a review from the worker.
///
/// Validates the id and confirms the review exists locally before proxying.
pub async fn get_review_diff(
    State(state): State<AppState>,
    Path(id): Path<String>,
) -> Result<Response, ReviewError> {
    let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;

    // Verify review exists in our database
    let repo = ReviewRepository::new(state.pool());
    let _review = repo.get_by_id(review_id).await?;

    // Proxy to worker
    proxy_to_worker(&state, &format!("/review/{}/diff", review_id)).await
}
|
||||
|
||||
/// POST /review/:id/success - Called by worker when review completes successfully
|
||||
/// Sends success notification email to the user, or posts PR comment for webhook reviews
|
||||
pub async fn review_success(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<String>,
|
||||
) -> Result<StatusCode, ReviewError> {
|
||||
let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;
|
||||
|
||||
// Fetch review from database to get email and PR title
|
||||
let repo = ReviewRepository::new(state.pool());
|
||||
let review = repo.get_by_id(review_id).await?;
|
||||
|
||||
// Mark review as completed
|
||||
repo.mark_completed(review_id).await?;
|
||||
|
||||
// Build review URL
|
||||
let review_url = format!("{}/review/{}", state.server_public_base_url, review_id);
|
||||
|
||||
// Check if this is a webhook-triggered review
|
||||
if review.is_webhook_review() {
|
||||
// Post PR comment instead of sending email
|
||||
if let Some(github_app) = state.github_app() {
|
||||
let comment = format!(
|
||||
"## Vibe Kanban Review Complete\n\n\
|
||||
Your code review is ready!\n\n\
|
||||
**[View Review]({})**",
|
||||
review_url
|
||||
);
|
||||
|
||||
let installation_id = review.github_installation_id.unwrap_or(0);
|
||||
let pr_owner = review.pr_owner.as_deref().unwrap_or("");
|
||||
let pr_repo = review.pr_repo.as_deref().unwrap_or("");
|
||||
let pr_number = review.pr_number.unwrap_or(0) as u64;
|
||||
|
||||
if let Err(e) = github_app
|
||||
.post_pr_comment(installation_id, pr_owner, pr_repo, pr_number, &comment)
|
||||
.await
|
||||
{
|
||||
tracing::error!(
|
||||
?e,
|
||||
review_id = %review_id,
|
||||
"Failed to post success comment to PR"
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if let Some(email) = &review.email {
|
||||
// CLI review - send email notification
|
||||
state
|
||||
.mailer
|
||||
.send_review_ready(email, &review_url, &review.pr_title)
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok(StatusCode::OK)
|
||||
}
|
||||
|
||||
/// POST /review/:id/failed - Called by worker when review fails
|
||||
/// Sends failure notification email to the user, or posts PR comment for webhook reviews
|
||||
pub async fn review_failed(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<String>,
|
||||
) -> Result<StatusCode, ReviewError> {
|
||||
let review_id: Uuid = id.parse().map_err(|_| ReviewError::InvalidReviewId)?;
|
||||
|
||||
// Fetch review from database to get email and PR title
|
||||
let repo = ReviewRepository::new(state.pool());
|
||||
let review = repo.get_by_id(review_id).await?;
|
||||
|
||||
// Mark review as failed
|
||||
repo.mark_failed(review_id).await?;
|
||||
|
||||
// Check if this is a webhook-triggered review
|
||||
if review.is_webhook_review() {
|
||||
// Post PR comment instead of sending email
|
||||
if let Some(github_app) = state.github_app() {
|
||||
let comment = format!(
|
||||
"## Vibe Kanban Review Failed\n\n\
|
||||
Unfortunately, the code review could not be completed.\n\n\
|
||||
Review ID: `{}`",
|
||||
review_id
|
||||
);
|
||||
|
||||
let installation_id = review.github_installation_id.unwrap_or(0);
|
||||
let pr_owner = review.pr_owner.as_deref().unwrap_or("");
|
||||
let pr_repo = review.pr_repo.as_deref().unwrap_or("");
|
||||
let pr_number = review.pr_number.unwrap_or(0) as u64;
|
||||
|
||||
if let Err(e) = github_app
|
||||
.post_pr_comment(installation_id, pr_owner, pr_repo, pr_number, &comment)
|
||||
.await
|
||||
{
|
||||
tracing::error!(
|
||||
?e,
|
||||
review_id = %review_id,
|
||||
"Failed to post failure comment to PR"
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if let Some(email) = &review.email {
|
||||
// CLI review - send email notification
|
||||
state
|
||||
.mailer
|
||||
.send_review_failed(email, &review.pr_title, &review_id.to_string())
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok(StatusCode::OK)
|
||||
}
|
||||
@@ -5,7 +5,9 @@ use sqlx::PgPool;
|
||||
use crate::{
|
||||
auth::{JwtService, OAuthHandoffService, OAuthTokenValidator, ProviderRegistry},
|
||||
config::RemoteServerConfig,
|
||||
github_app::GitHubAppService,
|
||||
mail::Mailer,
|
||||
r2::R2Service,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -18,6 +20,8 @@ pub struct AppState {
|
||||
pub http_client: reqwest::Client,
|
||||
handoff: Arc<OAuthHandoffService>,
|
||||
oauth_token_validator: Arc<OAuthTokenValidator>,
|
||||
r2: Option<R2Service>,
|
||||
github_app: Option<Arc<GitHubAppService>>,
|
||||
}
|
||||
|
||||
impl AppState {
|
||||
@@ -31,6 +35,8 @@ impl AppState {
|
||||
mailer: Arc<dyn Mailer>,
|
||||
server_public_base_url: String,
|
||||
http_client: reqwest::Client,
|
||||
r2: Option<R2Service>,
|
||||
github_app: Option<Arc<GitHubAppService>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
pool,
|
||||
@@ -41,6 +47,8 @@ impl AppState {
|
||||
http_client,
|
||||
handoff,
|
||||
oauth_token_validator,
|
||||
r2,
|
||||
github_app,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,4 +75,12 @@ impl AppState {
|
||||
pub fn oauth_token_validator(&self) -> Arc<OAuthTokenValidator> {
|
||||
Arc::clone(&self.oauth_token_validator)
|
||||
}
|
||||
|
||||
pub fn r2(&self) -> Option<&R2Service> {
|
||||
self.r2.as_ref()
|
||||
}
|
||||
|
||||
pub fn github_app(&self) -> Option<&GitHubAppService> {
|
||||
self.github_app.as_deref()
|
||||
}
|
||||
}
|
||||
|
||||
29
crates/review/Cargo.toml
Normal file
29
crates/review/Cargo.toml
Normal file
@@ -0,0 +1,29 @@
|
||||
[package]
|
||||
name = "review"
|
||||
version = "0.0.134"
|
||||
edition = "2024"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
name = "review"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "4", features = ["derive", "env"] }
|
||||
tokio = { workspace = true }
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tar = "0.4"
|
||||
flate2 = "1.0"
|
||||
indicatif = "0.17"
|
||||
anyhow = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
uuid = { version = "1.0", features = ["v4", "serde"] }
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
tempfile = "3.8"
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
dialoguer = "0.11"
|
||||
dirs = "5.0"
|
||||
toml = "0.8"
|
||||
208
crates/review/src/api.rs
Normal file
208
crates/review/src/api.rs
Normal file
@@ -0,0 +1,208 @@
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::debug;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::error::ReviewError;
|
||||
|
||||
/// API client for the review service (the remote server's /v1/review routes).
pub struct ReviewApiClient {
    client: Client,
    // Base URL of the remote server, e.g. "https://example.com" — paths like
    // "/v1/review/init" are appended to it.
    base_url: String,
}

/// Response from POST /review/init
#[derive(Debug, Deserialize)]
pub struct InitResponse {
    pub review_id: Uuid,
    /// Presigned PUT URL for uploading the codebase tarball.
    pub upload_url: String,
    pub object_key: String,
}

/// Request body for POST /review/init
#[derive(Debug, Serialize)]
struct InitRequest {
    gh_pr_url: String,
    email: String,
    pr_title: String,
}

/// Request body for POST /review/start
// camelCase to match the review worker's JSON contract.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct StartRequest {
    pub id: String,
    pub title: String,
    pub description: String,
    pub org: String,
    pub repo: String,
    /// Where the worker fetches the uploaded codebase tarball from.
    pub codebase_url: String,
    pub base_commit: String,
}

/// Response from GET /review/{id}/status
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StatusResponse {
    pub status: ReviewStatus,
    /// Optional human-readable progress message.
    pub progress: Option<String>,
    /// Populated when `status` is `Failed`.
    pub error: Option<String>,
}

/// Possible review statuses, in rough lifecycle order.
// snake_case to match the wire format; Display below mirrors it.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ReviewStatus {
    Queued,
    Extracting,
    Running,
    Completed,
    Failed,
}
|
||||
|
||||
impl std::fmt::Display for ReviewStatus {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ReviewStatus::Queued => write!(f, "queued"),
|
||||
ReviewStatus::Extracting => write!(f, "extracting"),
|
||||
ReviewStatus::Running => write!(f, "running"),
|
||||
ReviewStatus::Completed => write!(f, "completed"),
|
||||
ReviewStatus::Failed => write!(f, "failed"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ReviewApiClient {
|
||||
/// Create a new API client
|
||||
pub fn new(base_url: String) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
base_url,
|
||||
}
|
||||
}
|
||||
|
||||
/// Initialize a review upload and get a presigned URL
|
||||
pub async fn init(
|
||||
&self,
|
||||
pr_url: &str,
|
||||
email: &str,
|
||||
pr_title: &str,
|
||||
) -> Result<InitResponse, ReviewError> {
|
||||
let url = format!("{}/v1/review/init", self.base_url);
|
||||
debug!("POST {url}");
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(&url)
|
||||
.json(&InitRequest {
|
||||
gh_pr_url: pr_url.to_string(),
|
||||
email: email.to_string(),
|
||||
pr_title: pr_title.to_string(),
|
||||
})
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ReviewError::ApiError(e.to_string()))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.unwrap_or_else(|_| "Unknown error".to_string());
|
||||
return Err(ReviewError::ApiError(format!("{status}: {body}")));
|
||||
}
|
||||
|
||||
let init_response: InitResponse = response
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| ReviewError::ApiError(e.to_string()))?;
|
||||
|
||||
debug!("Review ID: {}", init_response.review_id);
|
||||
|
||||
Ok(init_response)
|
||||
}
|
||||
|
||||
/// Upload the tarball to the presigned URL
|
||||
pub async fn upload(&self, upload_url: &str, payload: Vec<u8>) -> Result<(), ReviewError> {
|
||||
debug!("PUT {} ({} bytes)", upload_url, payload.len());
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.put(upload_url)
|
||||
.header("Content-Type", "application/gzip")
|
||||
.body(payload)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ReviewError::UploadFailed(e.to_string()))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.unwrap_or_else(|_| "Unknown error".to_string());
|
||||
return Err(ReviewError::UploadFailed(format!("{status}: {body}")));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Start the review process
|
||||
pub async fn start(&self, request: StartRequest) -> Result<(), ReviewError> {
|
||||
let url = format!("{}/v1/review/start", self.base_url);
|
||||
debug!("POST {url}");
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(&url)
|
||||
.json(&request)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ReviewError::ApiError(e.to_string()))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.unwrap_or_else(|_| "Unknown error".to_string());
|
||||
return Err(ReviewError::ApiError(format!("{status}: {body}")));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Poll the review status
|
||||
pub async fn poll_status(&self, review_id: &str) -> Result<StatusResponse, ReviewError> {
|
||||
let url = format!("{}/v1/review/{}/status", self.base_url, review_id);
|
||||
debug!("GET {url}");
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ReviewError::ApiError(e.to_string()))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.unwrap_or_else(|_| "Unknown error".to_string());
|
||||
return Err(ReviewError::ApiError(format!("{status}: {body}")));
|
||||
}
|
||||
|
||||
let status_response: StatusResponse = response
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| ReviewError::ApiError(e.to_string()))?;
|
||||
|
||||
Ok(status_response)
|
||||
}
|
||||
|
||||
/// Get the review URL for a given review ID
|
||||
pub fn review_url(&self, review_id: &str) -> String {
|
||||
format!("{}/review/{}", self.base_url, review_id)
|
||||
}
|
||||
}
|
||||
106
crates/review/src/archive.rs
Normal file
106
crates/review/src/archive.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
use std::{fs::File, path::Path};
|
||||
|
||||
use flate2::{Compression, write::GzEncoder};
|
||||
use tar::Builder;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::error::ReviewError;
|
||||
|
||||
/// Create a tar.gz archive of `source_dir`, fully in memory.
///
/// Entry paths are stored relative to `source_dir`. Symlinks and other
/// special files are skipped (see `add_directory_to_archive`).
pub fn create_tarball(source_dir: &Path) -> Result<Vec<u8>, ReviewError> {
    debug!("Creating tarball from {}", source_dir.display());

    let mut buffer = Vec::new();

    {
        let encoder = GzEncoder::new(&mut buffer, Compression::default());
        let mut archive = Builder::new(encoder);

        add_directory_to_archive(&mut archive, source_dir, source_dir)?;

        // Finish the tar stream first, then flush the gzip encoder; both must
        // complete before `buffer` holds a valid archive.
        let encoder = archive
            .into_inner()
            .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
        encoder
            .finish()
            .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
    }

    debug!("Created tarball: {} bytes", buffer.len());

    Ok(buffer)
}
|
||||
|
||||
fn add_directory_to_archive<W: std::io::Write>(
|
||||
archive: &mut Builder<W>,
|
||||
base_dir: &Path,
|
||||
current_dir: &Path,
|
||||
) -> Result<(), ReviewError> {
|
||||
let entries =
|
||||
std::fs::read_dir(current_dir).map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
|
||||
let path = entry.path();
|
||||
|
||||
let relative_path = path
|
||||
.strip_prefix(base_dir)
|
||||
.map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
|
||||
|
||||
let metadata = entry
|
||||
.metadata()
|
||||
.map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
|
||||
|
||||
if metadata.is_dir() {
|
||||
// Recursively add directory contents
|
||||
add_directory_to_archive(archive, base_dir, &path)?;
|
||||
} else if metadata.is_file() {
|
||||
// Add file to archive
|
||||
let mut file =
|
||||
File::open(&path).map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
|
||||
archive
|
||||
.append_file(relative_path, &mut file)
|
||||
.map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
|
||||
}
|
||||
// Skip symlinks and other special files
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {

    use tempfile::TempDir;

    use super::*;

    #[test]
    fn test_create_tarball() {
        let temp_dir = TempDir::new().unwrap();
        let root = temp_dir.path();

        // Lay out a small tree: one top-level file, one nested file.
        std::fs::write(root.join("file1.txt"), "content1").unwrap();
        std::fs::create_dir(root.join("subdir")).unwrap();
        std::fs::write(root.join("subdir/file2.txt"), "content2").unwrap();

        let bytes = create_tarball(root).expect("Should create tarball");
        assert!(!bytes.is_empty());

        // Round-trip: decompress and list the archived entry paths.
        let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(&bytes[..]));
        let names: Vec<String> = archive
            .entries()
            .unwrap()
            .map(|e| e.unwrap().path().unwrap().to_string_lossy().to_string())
            .collect();

        for expected in ["file1.txt", "subdir/file2.txt"] {
            assert!(names.contains(&expected.to_string()), "missing {expected}");
        }
    }
}
|
||||
513
crates/review/src/claude_session.rs
Normal file
513
crates/review/src/claude_session.rs
Normal file
@@ -0,0 +1,513 @@
|
||||
use std::{
|
||||
fs::{self, File},
|
||||
io::{BufRead, BufReader},
|
||||
path::{Path, PathBuf},
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use serde::Deserialize;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::error::ReviewError;
|
||||
|
||||
/// Represents a Claude Code project directory
/// (one subdirectory of `~/.claude/projects`).
#[derive(Debug, Clone)]
pub struct ClaudeProject {
    // Absolute path to the project directory.
    pub path: PathBuf,
    // Friendly name derived from the directory name (see `extract_project_name`).
    pub name: String,
    // Git branch taken from the project's most recent session, if any.
    pub git_branch: Option<String>,
    // First user prompt of the most recent session, truncated for display.
    pub first_prompt: Option<String>,
    // Number of non-agent `.jsonl` session files in the project.
    pub session_count: usize,
    // Filesystem modification time of the project directory.
    pub modified_at: SystemTime,
}

/// Represents a single session file within a project
#[derive(Debug, Clone)]
pub struct ClaudeSession {
    // Path to the `.jsonl` session file.
    pub path: PathBuf,
    // `gitBranch` from the first record that carries one, if any.
    pub git_branch: Option<String>,
    // First user prompt found near the top of the file, truncated for display.
    pub first_prompt: Option<String>,
    // Filesystem modification time of the session file.
    pub modified_at: SystemTime,
}

/// A JSONL record for metadata extraction
///
/// `rename_all = "camelCase"` maps `git_branch` to the JSON key `gitBranch`.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct JsonlRecord {
    git_branch: Option<String>,
    message: Option<JsonlMessage>,
}

/// Message within a JSONL record
#[derive(Debug, Deserialize)]
struct JsonlMessage {
    // e.g. "user" or "assistant".
    role: Option<String>,
    // Either a plain string or an array of content blocks.
    content: Option<serde_json::Value>,
}
|
||||
|
||||
/// Get the Claude projects directory path (~/.claude/projects)
|
||||
pub fn get_claude_projects_dir() -> Option<PathBuf> {
|
||||
dirs::home_dir().map(|home| home.join(".claude").join("projects"))
|
||||
}
|
||||
|
||||
/// Discover all Claude projects, sorted by modification time (most recent first)
/// Aggregates session metadata (git_branch, first_prompt, session_count) from each project's sessions
pub fn discover_projects() -> Result<Vec<ClaudeProject>, ReviewError> {
    let projects_dir = get_claude_projects_dir().ok_or_else(|| {
        ReviewError::SessionDiscoveryFailed("Could not find home directory".into())
    })?;

    // A missing directory just means Claude Code has never run here — not an error.
    if !projects_dir.exists() {
        debug!(
            "Claude projects directory does not exist: {:?}",
            projects_dir
        );
        return Ok(Vec::new());
    }

    let mut projects = Vec::new();

    let entries = fs::read_dir(&projects_dir)
        .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;

    for entry in entries {
        let entry = entry.map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;
        let path = entry.path();

        // Each project is a subdirectory; ignore stray files.
        if !path.is_dir() {
            continue;
        }

        let metadata = entry
            .metadata()
            .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;

        // Unreadable mtimes fall back to the epoch so they sort last instead of erroring.
        let modified_at = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH);

        // Extract a friendly name from the directory name
        // e.g., "-private-var-...-worktrees-a04a-store-payloads-i" -> "store-payloads-i"
        let dir_name = path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("unknown");

        let name = extract_project_name(dir_name);

        // Discover sessions to get aggregated metadata
        let sessions = discover_sessions_in_dir(&path)?;
        let session_count = sessions.len();

        // Skip projects with no sessions
        if session_count == 0 {
            continue;
        }

        // Get metadata from the most recent session. Relies on
        // discover_sessions_in_dir returning sessions sorted
        // most-recently-modified first.
        let most_recent = &sessions[0]; // Already sorted by modification time
        let git_branch = most_recent.git_branch.clone();
        let first_prompt = most_recent.first_prompt.clone();

        projects.push(ClaudeProject {
            path,
            name,
            git_branch,
            first_prompt,
            session_count,
            modified_at,
        });
    }

    // Sort by modification time, most recent first
    projects.sort_by(|a, b| b.modified_at.cmp(&a.modified_at));

    Ok(projects)
}
|
||||
|
||||
/// Extract a friendly project name from the Claude directory name.
///
/// Directory names look like
/// "-private-var-folders-...-T-vibe-kanban-worktrees-a04a-store-payloads-i";
/// the meaningful part follows "worktrees-" and a short hash prefix.
fn extract_project_name(dir_name: &str) -> String {
    const MARKER: &str = "worktrees-";

    match dir_name.find(MARKER) {
        Some(idx) => {
            let tail = &dir_name[idx + MARKER.len()..];
            // Drop the short hash prefix (e.g. "a04a-") when present.
            match tail.find('-') {
                Some(dash) => tail[dash + 1..].to_string(),
                None => tail.to_string(),
            }
        }
        // Fallback: the last dash-separated segment.
        None => dir_name.rsplit('-').next().unwrap_or(dir_name).to_string(),
    }
}
|
||||
|
||||
/// Discover sessions in a project, excluding agent-* files
///
/// Thin wrapper over [`discover_sessions_in_dir`] using the project's path;
/// results are sorted most-recently-modified first.
pub fn discover_sessions(project: &ClaudeProject) -> Result<Vec<ClaudeSession>, ReviewError> {
    discover_sessions_in_dir(&project.path)
}
|
||||
|
||||
/// Discover sessions in a directory, excluding agent-* files
|
||||
fn discover_sessions_in_dir(dir_path: &Path) -> Result<Vec<ClaudeSession>, ReviewError> {
|
||||
let mut sessions = Vec::new();
|
||||
|
||||
let entries =
|
||||
fs::read_dir(dir_path).map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;
|
||||
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;
|
||||
let path = entry.path();
|
||||
|
||||
// Only process .jsonl files
|
||||
if path.extension().and_then(|e| e.to_str()) != Some("jsonl") {
|
||||
continue;
|
||||
}
|
||||
|
||||
let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
|
||||
|
||||
// Skip agent-* files
|
||||
if file_name.starts_with("agent-") {
|
||||
continue;
|
||||
}
|
||||
|
||||
let metadata = entry
|
||||
.metadata()
|
||||
.map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;
|
||||
|
||||
let modified_at = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH);
|
||||
|
||||
// Extract metadata from the JSONL file
|
||||
let (git_branch, first_prompt) = extract_session_metadata(&path);
|
||||
|
||||
sessions.push(ClaudeSession {
|
||||
path,
|
||||
git_branch,
|
||||
first_prompt,
|
||||
modified_at,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by modification time, most recent first
|
||||
sessions.sort_by(|a, b| b.modified_at.cmp(&a.modified_at));
|
||||
|
||||
Ok(sessions)
|
||||
}
|
||||
|
||||
/// Extract session metadata from a JSONL file
|
||||
/// Returns: (git_branch, first_prompt)
|
||||
fn extract_session_metadata(path: &Path) -> (Option<String>, Option<String>) {
|
||||
let file = match File::open(path) {
|
||||
Ok(f) => f,
|
||||
Err(_) => return (None, None),
|
||||
};
|
||||
let reader = BufReader::new(file);
|
||||
|
||||
let mut git_branch: Option<String> = None;
|
||||
let mut first_prompt: Option<String> = None;
|
||||
|
||||
// Check first 50 lines for metadata
|
||||
for line in reader.lines().take(50) {
|
||||
let line = match line {
|
||||
Ok(l) => l,
|
||||
Err(_) => continue,
|
||||
};
|
||||
if line.trim().is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Ok(record) = serde_json::from_str::<JsonlRecord>(&line) {
|
||||
// Extract git branch if not already found
|
||||
if git_branch.is_none() && record.git_branch.is_some() {
|
||||
git_branch = record.git_branch;
|
||||
}
|
||||
|
||||
// Extract first user prompt if not already found
|
||||
if first_prompt.is_none()
|
||||
&& let Some(ref message) = record.message
|
||||
&& message.role.as_deref() == Some("user")
|
||||
&& let Some(ref content) = message.content
|
||||
{
|
||||
// Content can be a string or an array
|
||||
if let Some(text) = content.as_str() {
|
||||
first_prompt = Some(truncate_string(text, 60));
|
||||
}
|
||||
}
|
||||
|
||||
// Stop early if we have both
|
||||
if git_branch.is_some() && first_prompt.is_some() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
(git_branch, first_prompt)
|
||||
}
|
||||
|
||||
/// Truncate a string to at most `max_len` bytes for display, appending "..."
/// if truncated. Newlines are flattened to spaces first.
///
/// Unlike a raw byte slice, the cut point is snapped back to a UTF-8 char
/// boundary so multi-byte text cannot panic, and `max_len < 3` saturates to 0
/// instead of underflowing.
fn truncate_string(s: &str, max_len: usize) -> String {
    // Replace newlines with spaces for display
    let s = s.replace('\n', " ");
    if s.len() <= max_len {
        return s;
    }

    // Reserve room for the ellipsis, then back up to a char boundary so we
    // never slice through the middle of a multi-byte character.
    let mut cut = max_len.saturating_sub(3).min(s.len());
    while !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &s[..cut])
}
|
||||
|
||||
/// Find projects matching a specific git branch using fuzzy matching
|
||||
/// Returns matching projects with all their sessions
|
||||
pub fn find_projects_by_branch(
|
||||
projects: &[ClaudeProject],
|
||||
target_branch: &str,
|
||||
) -> Result<Vec<(ClaudeProject, Vec<ClaudeSession>)>, ReviewError> {
|
||||
let mut matches = Vec::new();
|
||||
|
||||
for project in projects {
|
||||
// Check if project's branch matches
|
||||
if let Some(ref project_branch) = project.git_branch
|
||||
&& branches_match(target_branch, project_branch)
|
||||
{
|
||||
let sessions = discover_sessions(project)?;
|
||||
matches.push((project.clone(), sessions));
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by modification time, most recent first
|
||||
matches.sort_by(|a, b| b.0.modified_at.cmp(&a.0.modified_at));
|
||||
|
||||
Ok(matches)
|
||||
}
|
||||
|
||||
/// Check if two branch names match using fuzzy matching
|
||||
fn branches_match(target: &str, session_branch: &str) -> bool {
|
||||
let target_normalized = normalize_branch(target);
|
||||
let session_normalized = normalize_branch(session_branch);
|
||||
|
||||
// Exact match after normalization
|
||||
if target_normalized == session_normalized {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if the slug portions match (e.g., "feature-auth" matches "vk/feature-auth")
|
||||
let target_slug = extract_branch_slug(&target_normalized);
|
||||
let session_slug = extract_branch_slug(&session_normalized);
|
||||
|
||||
target_slug == session_slug && !target_slug.is_empty()
|
||||
}
|
||||
|
||||
/// Normalize a branch name: strip a "refs/heads/" prefix and lowercase.
fn normalize_branch(branch: &str) -> String {
    let bare = match branch.strip_prefix("refs/heads/") {
        Some(rest) => rest,
        None => branch,
    };
    bare.to_lowercase()
}
|
||||
|
||||
/// Extract the "slug" portion of a branch name — the last '/'-separated
/// segment, e.g. "vk/a04a-store-payloads-i" -> "a04a-store-payloads-i".
fn extract_branch_slug(branch: &str) -> String {
    match branch.rfind('/') {
        Some(pos) => branch[pos + 1..].to_string(),
        None => branch.to_string(),
    }
}
|
||||
|
||||
/// A record with timestamp for sorting
struct TimestampedMessage {
    // Raw "timestamp" string from the JSONL record ("" when absent).
    // Presumably ISO-8601, so lexicographic compare orders correctly — verify
    // against the log format.
    timestamp: String,
    // The filtered message payload to emit.
    message: serde_json::Value,
}
|
||||
|
||||
/// Concatenate multiple JSONL files into a single JSON array of messages.
///
/// Filters to include only:
/// - User messages (role = "user")
/// - Assistant messages with text content (role = "assistant" with content[].type = "text")
///
/// For assistant messages, only text content blocks are kept (tool_use, etc. are filtered out).
///
/// Messages from all files are merged and ordered by their `timestamp` field
/// (lexicographic compare; records without a timestamp sort first).
pub fn concatenate_sessions_to_json(session_paths: &[PathBuf]) -> Result<String, ReviewError> {
    let mut all_messages: Vec<TimestampedMessage> = Vec::new();

    for path in session_paths {
        let file = File::open(path)
            .map_err(|e| ReviewError::JsonlParseFailed(format!("{}: {}", path.display(), e)))?;
        let reader = BufReader::new(file);

        for (line_num, line) in reader.lines().enumerate() {
            // Errors carry file:line context so a bad record is easy to find.
            let line = line.map_err(|e| {
                ReviewError::JsonlParseFailed(format!("{}:{}: {}", path.display(), line_num + 1, e))
            })?;

            if line.trim().is_empty() {
                continue;
            }

            let record: serde_json::Value = serde_json::from_str(&line).map_err(|e| {
                ReviewError::JsonlParseFailed(format!("{}:{}: {}", path.display(), line_num + 1, e))
            })?;

            // Extract timestamp for sorting ("" when the field is missing).
            let timestamp = record
                .get("timestamp")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();

            // Extract and filter the message; records with no includable
            // content are dropped entirely.
            if let Some(message) = extract_filtered_message(&record) {
                all_messages.push(TimestampedMessage { timestamp, message });
            }
        }
    }

    // Sort by timestamp (sort_by is stable, so equal timestamps keep file order).
    all_messages.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));

    // Extract just the messages
    let messages: Vec<serde_json::Value> = all_messages.into_iter().map(|m| m.message).collect();

    serde_json::to_string(&messages).map_err(|e| ReviewError::JsonlParseFailed(e.to_string()))
}
|
||||
|
||||
/// Extract and filter a message from a JSONL record.
|
||||
///
|
||||
/// Returns Some(message) if the record should be included, None otherwise.
|
||||
/// - User messages: include if content is a string, or if content array has text blocks
|
||||
/// - Assistant messages: include if content array has text blocks (filter out tool_use, etc.)
|
||||
fn extract_filtered_message(record: &serde_json::Value) -> Option<serde_json::Value> {
|
||||
let message = record.get("message")?;
|
||||
let role = message.get("role")?.as_str()?;
|
||||
let content = message.get("content")?;
|
||||
|
||||
match role {
|
||||
"user" => {
|
||||
// If content is a string, include directly
|
||||
if content.is_string() {
|
||||
return Some(message.clone());
|
||||
}
|
||||
|
||||
// If content is an array, filter to text blocks only
|
||||
if let Some(content_array) = content.as_array() {
|
||||
let text_blocks: Vec<serde_json::Value> = content_array
|
||||
.iter()
|
||||
.filter(|block| block.get("type").and_then(|t| t.as_str()) == Some("text"))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Skip if no text content (e.g., only tool_result)
|
||||
if text_blocks.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Create filtered message with only text content
|
||||
let mut filtered_message = serde_json::Map::new();
|
||||
filtered_message.insert(
|
||||
"role".to_string(),
|
||||
serde_json::Value::String("user".to_string()),
|
||||
);
|
||||
filtered_message
|
||||
.insert("content".to_string(), serde_json::Value::Array(text_blocks));
|
||||
|
||||
return Some(serde_json::Value::Object(filtered_message));
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
"assistant" => {
|
||||
// Filter assistant messages to only include text content
|
||||
if let Some(content_array) = content.as_array() {
|
||||
// Filter to only text blocks
|
||||
let text_blocks: Vec<serde_json::Value> = content_array
|
||||
.iter()
|
||||
.filter(|block| block.get("type").and_then(|t| t.as_str()) == Some("text"))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Skip if no text content
|
||||
if text_blocks.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Create filtered message with only text content
|
||||
let mut filtered_message = serde_json::Map::new();
|
||||
filtered_message.insert(
|
||||
"role".to_string(),
|
||||
serde_json::Value::String("assistant".to_string()),
|
||||
);
|
||||
filtered_message
|
||||
.insert("content".to_string(), serde_json::Value::Array(text_blocks));
|
||||
|
||||
Some(serde_json::Value::Object(filtered_message))
|
||||
} else {
|
||||
// Content is not an array (unusual), skip
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_extract_project_name() {
        let cases = [
            (
                "-private-var-folders-m1-9q-ct1913z10v6wbnv54j25r0000gn-T-vibe-kanban-worktrees-a04a-store-payloads-i",
                "store-payloads-i",
            ),
            (
                "-private-var-folders-m1-9q-ct1913z10v6wbnv54j25r0000gn-T-vibe-kanban-worktrees-1ff1-new-rust-binary",
                "new-rust-binary",
            ),
        ];
        for (input, expected) in cases {
            assert_eq!(extract_project_name(input), expected);
        }
    }

    #[test]
    fn test_branches_match() {
        let matching = [
            ("feature-auth", "feature-auth"),                      // exact
            ("feature-auth", "vk/feature-auth"),                   // prefix on one side
            ("vk/feature-auth", "feature-auth"),
            ("a04a-store-payloads-i", "vk/a04a-store-payloads-i"), // slug
            ("Feature-Auth", "feature-auth"),                      // case-insensitive
        ];
        for (a, b) in matching {
            assert!(branches_match(a, b), "{a} should match {b}");
        }

        // Non-matches, including regression cases where substrings used to
        // match incorrectly before the fix.
        let non_matching = [
            ("feature-auth", "feature-other"),
            ("main", "feature-auth"),
            ("vk/d13f-remove-compare-c", "c"),
            ("vk/d13f-remove-compare-c", "compare"),
            ("feature-auth", "auth"),
            ("feature-auth", "feature"),
        ];
        for (a, b) in non_matching {
            assert!(!branches_match(a, b), "{a} should not match {b}");
        }
    }

    #[test]
    fn test_normalize_branch() {
        assert_eq!(normalize_branch("refs/heads/main"), "main");
        assert_eq!(normalize_branch("Feature-Auth"), "feature-auth");
        assert_eq!(normalize_branch("vk/feature-auth"), "vk/feature-auth");
    }

    #[test]
    fn test_extract_branch_slug() {
        for (input, expected) in [
            ("vk/feature-auth", "feature-auth"),
            ("feature-auth", "feature-auth"),
            ("user/prefix/feature-auth", "feature-auth"),
        ] {
            assert_eq!(extract_branch_slug(input), expected);
        }
    }
}
|
||||
47
crates/review/src/config.rs
Normal file
47
crates/review/src/config.rs
Normal file
@@ -0,0 +1,47 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Persisted CLI settings, stored as TOML (see `Config::config_path`).
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Config {
    // Last email address the user entered; offered as the default next run.
    #[serde(default)]
    pub email: Option<String>,
}
|
||||
|
||||
impl Config {
|
||||
/// Get the path to the config file (~/.config/vibe-kanban/review.toml)
|
||||
fn config_path() -> Option<PathBuf> {
|
||||
dirs::config_dir().map(|p| p.join("vibe-kanban").join("review.toml"))
|
||||
}
|
||||
|
||||
/// Load config from disk, returning default if file doesn't exist
|
||||
pub fn load() -> Self {
|
||||
let Some(path) = Self::config_path() else {
|
||||
return Self::default();
|
||||
};
|
||||
|
||||
if !path.exists() {
|
||||
return Self::default();
|
||||
}
|
||||
|
||||
match std::fs::read_to_string(&path) {
|
||||
Ok(contents) => toml::from_str(&contents).unwrap_or_default(),
|
||||
Err(_) => Self::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Save config to disk
|
||||
pub fn save(&self) -> std::io::Result<()> {
|
||||
let Some(path) = Self::config_path() else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Create parent directories if needed
|
||||
if let Some(parent) = path.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let contents = toml::to_string_pretty(self).unwrap_or_default();
|
||||
std::fs::write(&path, contents)
|
||||
}
|
||||
}
|
||||
43
crates/review/src/error.rs
Normal file
43
crates/review/src/error.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use thiserror::Error;
|
||||
|
||||
/// Errors surfaced by the review CLI; each variant's `#[error]` string is the
/// user-facing message.
#[derive(Debug, Error)]
pub enum ReviewError {
    #[error("GitHub CLI (gh) is not installed. Install it from https://cli.github.com/")]
    GhNotInstalled,

    #[error("GitHub CLI is not authenticated. Run 'gh auth login' first.")]
    GhNotAuthenticated,

    #[error("Invalid GitHub PR URL format. Expected: https://github.com/owner/repo/pull/123")]
    InvalidPrUrl,

    #[error("Failed to get PR information: {0}")]
    PrInfoFailed(String),

    #[error("Failed to clone repository: {0}")]
    CloneFailed(String),

    #[error("Failed to checkout PR: {0}")]
    CheckoutFailed(String),

    #[error("Failed to create archive: {0}")]
    ArchiveFailed(String),

    #[error("API request failed: {0}")]
    ApiError(String),

    #[error("Upload failed: {0}")]
    UploadFailed(String),

    #[error("Review failed: {0}")]
    ReviewFailed(String),

    // NOTE(review): "10 minutes" should stay in sync with TIMEOUT in main.rs
    // (600 seconds).
    #[error("Review timed out after 10 minutes")]
    Timeout,

    #[error("Failed to discover Claude Code sessions: {0}")]
    SessionDiscoveryFailed(String),

    #[error("Failed to parse JSONL file: {0}")]
    JsonlParseFailed(String),
}
|
||||
229
crates/review/src/github.rs
Normal file
229
crates/review/src/github.rs
Normal file
@@ -0,0 +1,229 @@
|
||||
use std::{path::Path, process::Command};
|
||||
|
||||
use serde::Deserialize;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::error::ReviewError;
|
||||
|
||||
/// Information about a pull request
#[derive(Debug)]
pub struct PrInfo {
    // Repository owner (user or org) parsed from the PR URL.
    pub owner: String,
    // Repository name parsed from the PR URL.
    pub repo: String,
    // PR title as reported by `gh pr view`.
    pub title: String,
    // PR body text as reported by `gh pr view`.
    pub description: String,
    // SHA of the base commit the PR targets (baseRefOid).
    pub base_commit: String,
    // SHA of the PR's head commit (headRefOid).
    pub head_commit: String,
    // Name of the PR's head branch (headRefName).
    pub head_ref_name: String,
}
|
||||
|
||||
/// Response from `gh pr view --json`
///
/// Field names map to gh's camelCase JSON keys via `rename_all`
/// (e.g. `base_ref_oid` <- "baseRefOid").
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GhPrView {
    title: String,
    body: String,
    base_ref_oid: String,
    head_ref_oid: String,
    head_ref_name: String,
}
|
||||
|
||||
/// Parse a GitHub PR URL to extract owner, repo, and PR number
|
||||
///
|
||||
/// Expected format: https://github.com/owner/repo/pull/123
|
||||
pub fn parse_pr_url(url: &str) -> Result<(String, String, i64), ReviewError> {
|
||||
let url = url.trim();
|
||||
|
||||
// Remove trailing slashes
|
||||
let url = url.trim_end_matches('/');
|
||||
|
||||
// Try to parse as URL
|
||||
let parts: Vec<&str> = url.split('/').collect();
|
||||
|
||||
// Find the index of "github.com" and then extract owner/repo/pull/number
|
||||
let github_idx = parts
|
||||
.iter()
|
||||
.position(|&p| p == "github.com")
|
||||
.ok_or(ReviewError::InvalidPrUrl)?;
|
||||
|
||||
// We need at least: github.com / owner / repo / pull / number
|
||||
if parts.len() < github_idx + 5 {
|
||||
return Err(ReviewError::InvalidPrUrl);
|
||||
}
|
||||
|
||||
let owner = parts[github_idx + 1].to_string();
|
||||
let repo = parts[github_idx + 2].to_string();
|
||||
|
||||
if parts[github_idx + 3] != "pull" {
|
||||
return Err(ReviewError::InvalidPrUrl);
|
||||
}
|
||||
|
||||
let pr_number: i64 = parts[github_idx + 4]
|
||||
.parse()
|
||||
.map_err(|_| ReviewError::InvalidPrUrl)?;
|
||||
|
||||
if owner.is_empty() || repo.is_empty() || pr_number <= 0 {
|
||||
return Err(ReviewError::InvalidPrUrl);
|
||||
}
|
||||
|
||||
Ok((owner, repo, pr_number))
|
||||
}
|
||||
|
||||
/// Check if the GitHub CLI is installed
|
||||
fn ensure_gh_available() -> Result<(), ReviewError> {
|
||||
let output = Command::new("which")
|
||||
.arg("gh")
|
||||
.output()
|
||||
.map_err(|_| ReviewError::GhNotInstalled)?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(ReviewError::GhNotInstalled);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get PR information using `gh pr view`
///
/// Errors: `GhNotInstalled` / `GhNotAuthenticated` for CLI availability
/// problems; `PrInfoFailed` for any other gh or JSON-decoding failure.
pub fn get_pr_info(owner: &str, repo: &str, pr_number: i64) -> Result<PrInfo, ReviewError> {
    ensure_gh_available()?;

    debug!("Fetching PR info for {owner}/{repo}#{pr_number}");

    let output = Command::new("gh")
        .args([
            "pr",
            "view",
            &pr_number.to_string(),
            "--repo",
            &format!("{owner}/{repo}"),
            "--json",
            "title,body,baseRefOid,headRefOid,headRefName",
        ])
        .output()
        .map_err(|e| ReviewError::PrInfoFailed(e.to_string()))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        let lower = stderr.to_ascii_lowercase();

        // Heuristic: gh prints varying auth-failure messages; map the
        // recognizable ones to a dedicated variant with a helpful remedy.
        if lower.contains("authentication")
            || lower.contains("gh auth login")
            || lower.contains("unauthorized")
        {
            return Err(ReviewError::GhNotAuthenticated);
        }

        return Err(ReviewError::PrInfoFailed(stderr.to_string()));
    }

    // gh emits camelCase keys matching GhPrView's serde rename.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let pr_view: GhPrView =
        serde_json::from_str(&stdout).map_err(|e| ReviewError::PrInfoFailed(e.to_string()))?;

    Ok(PrInfo {
        owner: owner.to_string(),
        repo: repo.to_string(),
        title: pr_view.title,
        description: pr_view.body,
        base_commit: pr_view.base_ref_oid,
        head_commit: pr_view.head_ref_oid,
        head_ref_name: pr_view.head_ref_name,
    })
}
|
||||
|
||||
/// Clone a repository using `gh repo clone`
|
||||
pub fn clone_repo(owner: &str, repo: &str, target_dir: &Path) -> Result<(), ReviewError> {
|
||||
ensure_gh_available()?;
|
||||
|
||||
debug!("Cloning {owner}/{repo} to {}", target_dir.display());
|
||||
|
||||
let output = Command::new("gh")
|
||||
.args([
|
||||
"repo",
|
||||
"clone",
|
||||
&format!("{owner}/{repo}"),
|
||||
target_dir
|
||||
.to_str()
|
||||
.ok_or_else(|| ReviewError::CloneFailed("Invalid target path".to_string()))?,
|
||||
])
|
||||
.output()
|
||||
.map_err(|e| ReviewError::CloneFailed(e.to_string()))?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(ReviewError::CloneFailed(stderr.to_string()));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Checkout a specific commit by SHA
|
||||
///
|
||||
/// This is more reliable than `gh pr checkout` because it works even when
|
||||
/// the PR's branch has been deleted (common for merged PRs).
|
||||
pub fn checkout_commit(commit_sha: &str, repo_dir: &Path) -> Result<(), ReviewError> {
|
||||
debug!("Fetching commit {commit_sha} in {}", repo_dir.display());
|
||||
|
||||
// First, fetch the specific commit
|
||||
let output = Command::new("git")
|
||||
.args(["fetch", "origin", commit_sha])
|
||||
.current_dir(repo_dir)
|
||||
.output()
|
||||
.map_err(|e| ReviewError::CheckoutFailed(e.to_string()))?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(ReviewError::CheckoutFailed(format!(
|
||||
"Failed to fetch commit: {stderr}"
|
||||
)));
|
||||
}
|
||||
|
||||
debug!("Checking out commit {commit_sha}");
|
||||
|
||||
// Then checkout the commit
|
||||
let output = Command::new("git")
|
||||
.args(["checkout", commit_sha])
|
||||
.current_dir(repo_dir)
|
||||
.output()
|
||||
.map_err(|e| ReviewError::CheckoutFailed(e.to_string()))?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(ReviewError::CheckoutFailed(format!(
|
||||
"Failed to checkout commit: {stderr}"
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_pr_url_valid() {
        let parsed = parse_pr_url("https://github.com/anthropics/claude-code/pull/123")
            .expect("Should parse valid URL");
        assert_eq!(
            parsed,
            ("anthropics".to_string(), "claude-code".to_string(), 123)
        );
    }

    #[test]
    fn test_parse_pr_url_with_trailing_slash() {
        let parsed =
            parse_pr_url("https://github.com/owner/repo/pull/456/").expect("Should parse");
        assert_eq!(parsed, ("owner".to_string(), "repo".to_string(), 456));
    }

    #[test]
    fn test_parse_pr_url_invalid_format() {
        let bad = [
            "https://github.com/owner/repo",
            "https://github.com/owner/repo/issues/123",
            "https://gitlab.com/owner/repo/pull/123",
            "not a url",
        ];
        for url in bad {
            assert!(parse_pr_url(url).is_err(), "{url} should be rejected");
        }
    }
}
|
||||
255
crates/review/src/main.rs
Normal file
255
crates/review/src/main.rs
Normal file
@@ -0,0 +1,255 @@
|
||||
mod api;
|
||||
mod archive;
|
||||
mod claude_session;
|
||||
mod config;
|
||||
mod error;
|
||||
mod github;
|
||||
mod session_selector;
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Result;
|
||||
use api::{ReviewApiClient, ReviewStatus, StartRequest};
|
||||
use clap::Parser;
|
||||
use error::ReviewError;
|
||||
use github::{checkout_commit, clone_repo, get_pr_info, parse_pr_url};
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use tempfile::TempDir;
|
||||
use tracing::debug;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
// Base URL of the review API; overridable via `--api-url` / `REVIEW_API_URL`
// (see `Args::api_url`).
const DEFAULT_API_URL: &str = "https://api.dev.vibekanban.com";
// Interval between review-status polls (by name; polling loop lives in `main`).
const POLL_INTERVAL: Duration = Duration::from_secs(10);
// Overall review deadline. NOTE(review): keep in sync with the
// "timed out after 10 minutes" text of `ReviewError::Timeout`.
const TIMEOUT: Duration = Duration::from_secs(600); // 10 minutes

// ASCII-art "REVIEW.FAST" banner shown by the CLI.
const BANNER: &str = r#"
██████╗ ███████╗██╗ ██╗██╗███████╗██╗ ██╗ ███████╗ █████╗ ███████╗████████╗
██╔══██╗██╔════╝██║ ██║██║██╔════╝██║ ██║ ██╔════╝██╔══██╗██╔════╝╚══██╔══╝
██████╔╝█████╗ ██║ ██║██║█████╗ ██║ █╗ ██║ █████╗ ███████║███████╗ ██║
██╔══██╗██╔══╝ ╚██╗ ██╔╝██║██╔══╝ ██║███╗██║ ██╔══╝ ██╔══██║╚════██║ ██║
██║ ██║███████╗ ╚████╔╝ ██║███████╗╚███╔███╔╝██╗██║ ██║ ██║███████║ ██║
╚═╝ ╚═╝╚══════╝ ╚═══╝ ╚═╝╚══════╝ ╚══╝╚══╝ ╚═╝╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═╝

"#;
|
||||
|
||||
/// Command-line arguments, parsed by clap (doc comments become `--help` text).
#[derive(Parser, Debug)]
#[command(name = "review")]
#[command(
    about = "Vibe-Kanban Review helps you review GitHub pull requests by turning them into a clear, story-driven summary instead of a wall of diffs. You provide a pull request URL, optionally link a Claude Code project for additional context, and it builds a narrative that highlights key events and important decisions, helping you prioritise what actually needs attention. It's particularly useful when reviewing large amounts of AI-generated code. Note that code is uploaded to and processed on Vibe-Kanban servers using AI."
)]
#[command(version)]
struct Args {
    /// GitHub PR URL (e.g., https://github.com/owner/repo/pull/123)
    pr_url: String,

    /// Enable verbose output
    #[arg(short, long, default_value_t = false)]
    verbose: bool,

    /// API base URL
    #[arg(long, env = "REVIEW_API_URL", default_value = DEFAULT_API_URL)]
    api_url: String,
}
|
||||
|
||||
/// Print the data-processing disclaimer and block until the user presses
/// Enter. Output goes to stdout; a failed stdin read is deliberately ignored
/// (the prompt is best-effort acknowledgement, not a hard gate).
fn show_disclaimer() {
    for line in [
        "",
        "DISCLAIMER: Your code will be processed on our secure remote servers, all artefacts (code, AI logs, etc...) will be deleted after 14 days.",
        "",
        "Full terms and conditions and privacy policy: https://review.fast/terms",
        "",
        "Press Enter to accept and continue...",
    ] {
        println!("{line}");
    }

    // Block until Enter; errors are intentionally discarded.
    let mut buf = String::new();
    let _ = std::io::stdin().read_line(&mut buf);
}
|
||||
|
||||
fn prompt_email(config: &mut config::Config) -> String {
|
||||
use dialoguer::Input;
|
||||
|
||||
let mut input: Input<String> =
|
||||
Input::new().with_prompt("Email address (we'll send a link to the review here, no spam)");
|
||||
|
||||
if let Some(ref saved_email) = config.email {
|
||||
input = input.default(saved_email.clone());
|
||||
}
|
||||
|
||||
let email: String = input.interact_text().expect("Failed to read email");
|
||||
|
||||
// Save email for next time
|
||||
config.email = Some(email.clone());
|
||||
if let Err(e) = config.save() {
|
||||
debug!("Failed to save config: {}", e);
|
||||
}
|
||||
|
||||
email
|
||||
}
|
||||
|
||||
fn create_spinner(message: &str) -> ProgressBar {
|
||||
let spinner = ProgressBar::new_spinner();
|
||||
spinner.set_style(
|
||||
ProgressStyle::default_spinner()
|
||||
.template("{spinner:.green} {msg}")
|
||||
.expect("Invalid spinner template"),
|
||||
);
|
||||
spinner.set_message(message.to_string());
|
||||
spinner.enable_steady_tick(Duration::from_millis(100));
|
||||
spinner
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
let args = Args::parse();
|
||||
|
||||
// Initialize tracing
|
||||
let filter = if args.verbose {
|
||||
EnvFilter::new("debug")
|
||||
} else {
|
||||
EnvFilter::new("warn")
|
||||
};
|
||||
tracing_subscriber::fmt().with_env_filter(filter).init();
|
||||
|
||||
println!("{}", BANNER);
|
||||
|
||||
show_disclaimer();
|
||||
|
||||
debug!("Args: {:?}", args);
|
||||
|
||||
// Run the main flow and handle errors
|
||||
if let Err(e) = run(args).await {
|
||||
eprintln!("Error: {e}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// End-to-end review flow: gather email, resolve the PR, optionally attach
/// Claude Code session logs, clone + archive the repo, upload it, start the
/// review, then poll until completion (bounded by TIMEOUT) and print the
/// result URL.
///
/// Returns a `ReviewError` for any failed step; `main` maps that to exit(1).
async fn run(args: Args) -> Result<(), ReviewError> {
    // 1. Load config and prompt for email (saved address used as default)
    let mut config = config::Config::load();
    let email = prompt_email(&mut config);

    // 2. Parse PR URL into (owner, repo, number)
    let spinner = create_spinner("Parsing PR URL...");
    let (owner, repo, pr_number) = parse_pr_url(&args.pr_url)?;
    spinner.finish_with_message(format!("PR: {owner}/{repo}#{pr_number}"));

    // 3. Get PR info (title, head/base commits, branch name)
    let spinner = create_spinner("Fetching PR information...");
    let pr_info = get_pr_info(&owner, &repo, pr_number)?;
    spinner.finish_with_message(format!("PR: {}", pr_info.title));

    // 4. Select Claude Code session (optional; failures degrade to "none",
    //    they never abort the review)
    let session_files = match session_selector::select_session(&pr_info.head_ref_name) {
        Ok(session_selector::SessionSelection::Selected(files)) => {
            println!(" Selected {} session file(s)", files.len());
            Some(files)
        }
        Ok(session_selector::SessionSelection::Skipped) => {
            println!(" Skipping project attachment");
            None
        }
        Err(e) => {
            debug!("Session selection error: {}", e);
            println!(" No sessions found");
            None
        }
    };

    // 5. Clone repository to temp directory (TempDir is removed on drop,
    //    i.e. when this function returns)
    let temp_dir = TempDir::new().map_err(|e| ReviewError::CloneFailed(e.to_string()))?;
    let repo_dir = temp_dir.path().join(&repo);

    let spinner = create_spinner("Cloning repository...");
    clone_repo(&owner, &repo, &repo_dir)?;
    spinner.finish_with_message("Repository cloned");

    // 6. Checkout PR head commit
    let spinner = create_spinner("Checking out PR...");
    checkout_commit(&pr_info.head_commit, &repo_dir)?;
    spinner.finish_with_message("PR checked out");

    // 7. Create tarball (with optional session data)
    let spinner = create_spinner("Creating archive...");

    // If sessions were selected, write .agent-messages.json to repo root so
    // it is captured inside the tarball alongside the code
    if let Some(ref files) = session_files {
        let json_content = claude_session::concatenate_sessions_to_json(files)?;
        let agent_messages_path = repo_dir.join(".agent-messages.json");
        std::fs::write(&agent_messages_path, json_content)
            .map_err(|e| ReviewError::ArchiveFailed(e.to_string()))?;
    }

    let payload = archive::create_tarball(&repo_dir)?;
    let size_mb = payload.len() as f64 / 1_048_576.0;
    spinner.finish_with_message(format!("Archive created ({size_mb:.2} MB)"));

    // 8. Initialize review (server returns review id + presigned upload URL)
    let client = ReviewApiClient::new(args.api_url.clone());
    let spinner = create_spinner("Initializing review...");
    let init_response = client.init(&args.pr_url, &email, &pr_info.title).await?;
    spinner.finish_with_message(format!("Review ID: {}", init_response.review_id));

    // 9. Upload archive to the presigned URL
    let spinner = create_spinner("Uploading archive...");
    client.upload(&init_response.upload_url, payload).await?;
    spinner.finish_with_message("Upload complete");

    // 10. Start review, pointing the server at the uploaded object
    let spinner = create_spinner("Starting review...");
    let codebase_url = format!("r2://{}", init_response.object_key);
    client
        .start(StartRequest {
            id: init_response.review_id.to_string(),
            title: pr_info.title,
            description: pr_info.description,
            org: pr_info.owner,
            repo: pr_info.repo,
            codebase_url,
            base_commit: pr_info.base_commit,
        })
        .await?;
    spinner.finish_with_message(format!("Review started, we'll send you an email at {} when the review is ready. This can take a few minutes, you may now close the terminal", email));

    // 11. Poll for completion every POLL_INTERVAL, up to TIMEOUT total
    let spinner = create_spinner("Review in progress...");
    let start_time = std::time::Instant::now();

    loop {
        tokio::time::sleep(POLL_INTERVAL).await;

        // Check for timeout before issuing another status request
        if start_time.elapsed() > TIMEOUT {
            spinner.finish_with_message("Timed out");
            return Err(ReviewError::Timeout);
        }

        let status = client
            .poll_status(&init_response.review_id.to_string())
            .await?;

        match status.status {
            ReviewStatus::Completed => {
                spinner.finish_with_message("Review completed!");
                break;
            }
            ReviewStatus::Failed => {
                spinner.finish_with_message("Review failed");
                let error_msg = status.error.unwrap_or_else(|| "Unknown error".to_string());
                return Err(ReviewError::ReviewFailed(error_msg));
            }
            // Any other status: still running — surface server-side progress
            // text when provided, otherwise the raw status name
            _ => {
                let progress = status.progress.unwrap_or_else(|| status.status.to_string());
                spinner.set_message(format!("Review in progress: {progress}"));
            }
        }
    }

    // 12. Print result URL
    let review_url = client.review_url(&init_response.review_id.to_string());
    println!("\nReview available at:");
    println!(" {review_url}");

    Ok(())
}
|
||||
173
crates/review/src/session_selector.rs
Normal file
173
crates/review/src/session_selector.rs
Normal file
@@ -0,0 +1,173 @@
|
||||
use std::{path::PathBuf, time::SystemTime};
|
||||
|
||||
use dialoguer::{Select, theme::ColorfulTheme};
|
||||
use tracing::debug;
|
||||
|
||||
use crate::{
|
||||
claude_session::{
|
||||
ClaudeProject, discover_projects, discover_sessions, find_projects_by_branch,
|
||||
},
|
||||
error::ReviewError,
|
||||
};
|
||||
|
||||
/// Result of session selection process
///
/// Produced by `select_session` / `select_project`; the caller treats
/// `Skipped` as "generate the review from code changes only".
pub enum SessionSelection {
    /// User selected session files to include (all sessions from a project)
    Selected(Vec<PathBuf>),
    /// User chose to skip session attachment
    Skipped,
}
|
||||
|
||||
/// Prompt user to select a Claude Code project
///
/// Flow:
/// 1. Try auto-match by branch name
/// 2. If match found, confirm with user
/// 3. If no match or user declines, show scrollable project list
/// 4. Allow user to skip entirely
///
/// When a project is selected, ALL sessions from that project are included.
///
/// Errors from project discovery or the interactive prompt surface as
/// `ReviewError`; an empty project list is not an error (returns `Skipped`).
pub fn select_session(pr_branch: &str) -> Result<SessionSelection, ReviewError> {
    debug!(
        "Looking for Claude Code projects matching branch: {}",
        pr_branch
    );

    let projects = discover_projects()?;

    if projects.is_empty() {
        debug!("No Claude Code projects found");
        return Ok(SessionSelection::Skipped);
    }

    // Try auto-match by branch
    let matches = find_projects_by_branch(&projects, pr_branch)?;

    if !matches.is_empty() {
        // Found a matching project, ask for confirmation.
        // Only the first match is offered; declining falls through to the
        // full manual list below.
        let (project, sessions) = &matches[0];

        println!();
        println!();
        println!(
            "Found matching Claude Code project for branch '{}'",
            pr_branch
        );
        println!(" Project: {}", project.name);
        if let Some(ref prompt) = project.first_prompt {
            println!(" \"{}\"", prompt);
        }
        println!(
            " {} session{} · Last modified: {}",
            project.session_count,
            if project.session_count == 1 { "" } else { "s" },
            format_time_ago(project.modified_at)
        );
        println!();

        let selection = Select::with_theme(&ColorfulTheme::default())
            .with_prompt("Use this project to improve review quality?")
            .items(&[
                "Yes, use this project",
                "No, choose a different project",
                "Skip (generate review from just code changes)",
            ])
            .default(0)
            .interact()
            .map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;

        // Indices correspond to the items array above.
        match selection {
            0 => {
                // Yes, use all sessions from this project
                let paths: Vec<PathBuf> = sessions.iter().map(|s| s.path.clone()).collect();
                return Ok(SessionSelection::Selected(paths));
            }
            2 => {
                // Skip
                return Ok(SessionSelection::Skipped);
            }
            _ => {
                // "No, choose a different project": fall through to manual selection
            }
        }
    }

    // Manual selection: select a project
    select_project(&projects)
}
|
||||
|
||||
/// Manual project selection - returns all sessions from selected project
|
||||
fn select_project(projects: &[ClaudeProject]) -> Result<SessionSelection, ReviewError> {
|
||||
// Build project list with rich metadata
|
||||
let mut items: Vec<String> = Vec::new();
|
||||
items.push("Skip (no project)\n".to_string());
|
||||
items.extend(projects.iter().map(format_project_item));
|
||||
items.push("Skip (no project)\n".to_string());
|
||||
|
||||
println!();
|
||||
println!();
|
||||
let selection = Select::with_theme(&ColorfulTheme::default())
|
||||
.with_prompt("Select a Claude Code project to improve review quality")
|
||||
.items(&items)
|
||||
.default(0)
|
||||
.max_length(5)
|
||||
.interact()
|
||||
.map_err(|e| ReviewError::SessionDiscoveryFailed(e.to_string()))?;
|
||||
|
||||
// Skip option
|
||||
if selection == 0 || selection == items.len() - 1 {
|
||||
return Ok(SessionSelection::Skipped);
|
||||
}
|
||||
|
||||
let project = &projects[selection];
|
||||
let sessions = discover_sessions(project)?;
|
||||
|
||||
// Return all session paths from this project
|
||||
let paths: Vec<PathBuf> = sessions.iter().map(|s| s.path.clone()).collect();
|
||||
Ok(SessionSelection::Selected(paths))
|
||||
}
|
||||
|
||||
/// Format a project item for display in the selection list
|
||||
fn format_project_item(project: &ClaudeProject) -> String {
|
||||
let prompt_line = project
|
||||
.first_prompt
|
||||
.as_ref()
|
||||
.map(|p| format!("\n \"{}\"", p))
|
||||
.unwrap_or_default();
|
||||
|
||||
let branch = project
|
||||
.git_branch
|
||||
.as_ref()
|
||||
.map(|b| format!("branch: {}", b))
|
||||
.unwrap_or_else(|| "no branch".to_string());
|
||||
|
||||
format!(
|
||||
"{}{}\n {} · {} session{} · {}\n",
|
||||
project.name,
|
||||
prompt_line,
|
||||
branch,
|
||||
project.session_count,
|
||||
if project.session_count == 1 { "" } else { "s" },
|
||||
format_time_ago(project.modified_at)
|
||||
)
|
||||
}
|
||||
|
||||
/// Format a SystemTime as a human-readable "time ago" string:
/// "just now" (< 1 min), then minutes, hours, or days, with correct
/// singular/plural. A `time` in the future degrades to "just now"
/// (duration_since error -> zero duration).
fn format_time_ago(time: SystemTime) -> String {
    let secs = SystemTime::now()
        .duration_since(time)
        .unwrap_or_default()
        .as_secs();

    if secs < 60 {
        return "just now".to_string();
    }

    // Pick the coarsest unit that keeps the count >= 1.
    let (count, unit) = if secs < 3600 {
        (secs / 60, "minute")
    } else if secs < 86400 {
        (secs / 3600, "hour")
    } else {
        (secs / 86400, "day")
    };

    let suffix = if count == 1 { "" } else { "s" };
    format!("{} {}{} ago", count, unit, suffix)
}
|
||||
@@ -27,7 +27,14 @@ zip -q vibe-kanban-mcp.zip vibe-kanban-mcp
|
||||
rm -f vibe-kanban-mcp
|
||||
mv vibe-kanban-mcp.zip npx-cli/dist/macos-arm64/vibe-kanban-mcp.zip
|
||||
|
||||
# Copy the Review CLI binary
|
||||
cp target/release/review vibe-kanban-review
|
||||
zip -q vibe-kanban-review.zip vibe-kanban-review
|
||||
rm -f vibe-kanban-review
|
||||
mv vibe-kanban-review.zip npx-cli/dist/macos-arm64/vibe-kanban-review.zip
|
||||
|
||||
echo "✅ NPM package ready!"
|
||||
echo "📁 Files created:"
|
||||
echo " - npx-cli/dist/macos-arm64/vibe-kanban.zip"
|
||||
echo " - npx-cli/dist/macos-arm64/vibe-kanban-mcp.zip"
|
||||
echo " - npx-cli/dist/macos-arm64/vibe-kanban-review.zip"
|
||||
|
||||
@@ -69,7 +69,9 @@ function getBinaryName(base) {
|
||||
|
||||
const platformDir = getPlatformDir();
|
||||
const extractDir = path.join(__dirname, "..", "dist", platformDir);
|
||||
const args = process.argv.slice(2);
|
||||
const isMcpMode = process.argv.includes("--mcp");
|
||||
const isReviewMode = args[0] === "review";
|
||||
|
||||
// ensure output dir
|
||||
fs.mkdirSync(extractDir, { recursive: true });
|
||||
@@ -142,6 +144,17 @@ if (isMcpMode) {
|
||||
});
|
||||
process.on("SIGTERM", () => proc.kill("SIGTERM"));
|
||||
});
|
||||
} else if (isReviewMode) {
|
||||
extractAndRun("vibe-kanban-review", (bin) => {
|
||||
// Pass all args except 'review' to the binary
|
||||
const reviewArgs = args.slice(1);
|
||||
const proc = spawn(bin, reviewArgs, { stdio: "inherit" });
|
||||
proc.on("exit", (c) => process.exit(c || 0));
|
||||
proc.on("error", (e) => {
|
||||
console.error("❌ Review CLI error:", e.message);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
} else {
|
||||
console.log(`📦 Extracting vibe-kanban...`);
|
||||
extractAndRun("vibe-kanban", (bin) => {
|
||||
|
||||
21
pnpm-lock.yaml
generated
21
pnpm-lock.yaml
generated
@@ -270,6 +270,24 @@ importers:
|
||||
|
||||
remote-frontend:
|
||||
dependencies:
|
||||
'@git-diff-view/file':
|
||||
specifier: ^0.0.30
|
||||
version: 0.0.30
|
||||
'@git-diff-view/react':
|
||||
specifier: ^0.0.30
|
||||
version: 0.0.30(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
clsx:
|
||||
specifier: ^2.1.1
|
||||
version: 2.1.1
|
||||
highlight.js:
|
||||
specifier: ^11.11.1
|
||||
version: 11.11.1
|
||||
posthog-js:
|
||||
specifier: ^1.283.0
|
||||
version: 1.283.0
|
||||
prettier:
|
||||
specifier: ^3.6.1
|
||||
version: 3.6.1
|
||||
react:
|
||||
specifier: ^18.2.0
|
||||
version: 18.3.1
|
||||
@@ -279,6 +297,9 @@ importers:
|
||||
react-router-dom:
|
||||
specifier: ^7.9.5
|
||||
version: 7.9.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
tailwind-merge:
|
||||
specifier: ^2.6.0
|
||||
version: 2.6.0
|
||||
devDependencies:
|
||||
'@types/react':
|
||||
specifier: ^18.2.43
|
||||
|
||||
@@ -3,3 +3,7 @@ VITE_API_BASE_URL=http://localhost:3000
|
||||
|
||||
# App base URL (frontend)
|
||||
VITE_APP_BASE_URL=http://localhost:3000
|
||||
|
||||
# PostHog analytics
|
||||
VITE_PUBLIC_POSTHOG_KEY=
|
||||
VITE_PUBLIC_POSTHOG_HOST=
|
||||
@@ -3,6 +3,8 @@
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="robots" content="noindex, nofollow" />
|
||||
<link rel="icon" type="image/png" href="/favicon.png" />
|
||||
<title>Vibe Kanban Remote</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
@@ -6,12 +6,20 @@
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc && vite build",
|
||||
"preview": "vite preview"
|
||||
"preview": "vite preview",
|
||||
"format": "prettier --write \"src/**/*.{ts,tsx,js,jsx,json,css,md}\""
|
||||
},
|
||||
"dependencies": {
|
||||
"@git-diff-view/file": "^0.0.30",
|
||||
"@git-diff-view/react": "^0.0.30",
|
||||
"clsx": "^2.1.1",
|
||||
"highlight.js": "^11.11.1",
|
||||
"posthog-js": "^1.283.0",
|
||||
"prettier": "^3.6.1",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-router-dom": "^7.9.5"
|
||||
"react-router-dom": "^7.9.5",
|
||||
"tailwind-merge": "^2.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^18.2.43",
|
||||
@@ -23,4 +31,4 @@
|
||||
"typescript": "^5.9.2",
|
||||
"vite": "^5.0.8"
|
||||
}
|
||||
}
|
||||
}
|
||||
BIN
remote-frontend/public/favicon.png
Normal file
BIN
remote-frontend/public/favicon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 13 KiB |
BIN
remote-frontend/public/logo_light.png
Normal file
BIN
remote-frontend/public/logo_light.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 16 KiB |
3
remote-frontend/public/review_fast_logo_dark.svg
Normal file
3
remote-frontend/public/review_fast_logo_dark.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 7.1 KiB |
2
remote-frontend/public/robots.txt
Normal file
2
remote-frontend/public/robots.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
User-agent: *
|
||||
Disallow: /
|
||||
@@ -3,10 +3,9 @@ function App() {
|
||||
<div className="min-h-screen bg-gray-900 text-white flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<h1 className="text-4xl font-bold mb-4">Vibe Kanban Remote</h1>
|
||||
<p className="text-gray-400">Frontend coming soon...</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
export default App
|
||||
export default App;
|
||||
|
||||
@@ -1,28 +1,48 @@
|
||||
import { createBrowserRouter, RouterProvider } from 'react-router-dom'
|
||||
import HomePage from './pages/HomePage'
|
||||
import InvitationPage from './pages/InvitationPage'
|
||||
import InvitationCompletePage from './pages/InvitationCompletePage'
|
||||
import NotFoundPage from './pages/NotFoundPage'
|
||||
import { createBrowserRouter, RouterProvider } from "react-router-dom";
|
||||
import HomePage from "./pages/HomePage";
|
||||
import InvitationPage from "./pages/InvitationPage";
|
||||
import InvitationCompletePage from "./pages/InvitationCompletePage";
|
||||
import ReviewPage from "./pages/ReviewPage";
|
||||
import AccountPage from "./pages/AccountPage";
|
||||
import AccountCompletePage from "./pages/AccountCompletePage";
|
||||
import OrganizationPage from "./pages/OrganizationPage";
|
||||
import NotFoundPage from "./pages/NotFoundPage";
|
||||
|
||||
const router = createBrowserRouter([
|
||||
{
|
||||
path: '/',
|
||||
path: "/",
|
||||
element: <HomePage />,
|
||||
},
|
||||
{
|
||||
path: '/invitations/:token/accept',
|
||||
path: "/review/:id",
|
||||
element: <ReviewPage />,
|
||||
},
|
||||
{
|
||||
path: "/invitations/:token/accept",
|
||||
element: <InvitationPage />,
|
||||
},
|
||||
{
|
||||
path: '/invitations/:token/complete',
|
||||
path: "/invitations/:token/complete",
|
||||
element: <InvitationCompletePage />,
|
||||
},
|
||||
{
|
||||
path: '*',
|
||||
path: "/account",
|
||||
element: <AccountPage />,
|
||||
},
|
||||
{
|
||||
path: "/account/complete",
|
||||
element: <AccountCompletePage />,
|
||||
},
|
||||
{
|
||||
path: "/account/organizations/:orgId",
|
||||
element: <OrganizationPage />,
|
||||
},
|
||||
{
|
||||
path: "*",
|
||||
element: <NotFoundPage />,
|
||||
},
|
||||
])
|
||||
]);
|
||||
|
||||
export default function AppRouter() {
|
||||
return <RouterProvider router={router} />
|
||||
return <RouterProvider router={router} />;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,79 @@
|
||||
import type { ReviewResult } from "./types/review";
|
||||
import {
|
||||
getAccessToken,
|
||||
getRefreshToken,
|
||||
storeTokens,
|
||||
clearTokens,
|
||||
} from "./auth";
|
||||
|
||||
const API_BASE = import.meta.env.VITE_API_BASE_URL || "";
|
||||
|
||||
// Types for account management

/** Role a user can hold within an organization. */
export type MemberRole = "ADMIN" | "MEMBER";

/** Identity details reported by a linked OAuth provider. */
export type ProviderProfile = {
  provider: string;
  username: string | null;
  display_name: string | null;
  email: string | null;
  avatar_url: string | null;
};

/** The signed-in user's profile plus all linked provider identities. */
export type ProfileResponse = {
  user_id: string;
  username: string | null;
  email: string;
  providers: ProviderProfile[];
};

/** An organization record as returned by the server (ISO-8601 timestamps). */
export type Organization = {
  id: string;
  name: string;
  slug: string;
  is_personal: boolean;
  created_at: string;
  updated_at: string;
};

/** An organization annotated with the current user's role in it. */
export type OrganizationWithRole = Organization & {
  user_role: MemberRole;
};

/** A member row joined with that user's profile fields (nullable when the
 * profile is incomplete). */
export type OrganizationMemberWithProfile = {
  user_id: string;
  role: MemberRole;
  joined_at: string;
  first_name: string | null;
  last_name: string | null;
  username: string | null;
  email: string | null;
  avatar_url: string | null;
};

/** Lifecycle state of an organization invitation. */
export type InvitationStatus = "PENDING" | "ACCEPTED" | "DECLINED" | "EXPIRED";

/** A pending or resolved invitation to join an organization. */
export type OrganizationInvitation = {
  id: string;
  organization_id: string;
  invited_by_user_id: string | null;
  email: string;
  role: MemberRole;
  status: InvitationStatus;
  token: string;
  created_at: string;
  expires_at: string;
};

/** Payload for POST /v1/organizations. */
export type CreateOrganizationRequest = {
  name: string;
  slug: string;
};

/** Response of GET /v1/organizations/:id. */
export type GetOrganizationResponse = {
  organization: Organization;
  user_role: string;
};
|
||||
|
||||
export type Invitation = {
|
||||
id: string;
|
||||
organization_slug: string;
|
||||
@@ -90,3 +164,380 @@ export async function acceptInvitation(
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function getReview(reviewId: string): Promise<ReviewResult> {
|
||||
const res = await fetch(`${API_BASE}/v1/review/${reviewId}`);
|
||||
if (!res.ok) {
|
||||
if (res.status === 404) {
|
||||
throw new Error("Review not found");
|
||||
}
|
||||
throw new Error(`Failed to fetch review (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function getFileContent(
|
||||
reviewId: string,
|
||||
fileHash: string,
|
||||
): Promise<string> {
|
||||
const res = await fetch(`${API_BASE}/v1/review/${reviewId}/file/${fileHash}`);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch file (${res.status})`);
|
||||
}
|
||||
return res.text();
|
||||
}
|
||||
|
||||
export async function getDiff(reviewId: string): Promise<string> {
|
||||
const res = await fetch(`${API_BASE}/v1/review/${reviewId}/diff`);
|
||||
if (!res.ok) {
|
||||
if (res.status === 404) {
|
||||
return "";
|
||||
}
|
||||
throw new Error(`Failed to fetch diff (${res.status})`);
|
||||
}
|
||||
return res.text();
|
||||
}
|
||||
|
||||
export interface ReviewMetadata {
|
||||
gh_pr_url: string;
|
||||
pr_title: string;
|
||||
}
|
||||
|
||||
export async function getReviewMetadata(
|
||||
reviewId: string,
|
||||
): Promise<ReviewMetadata> {
|
||||
const res = await fetch(`${API_BASE}/v1/review/${reviewId}/metadata`);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch review metadata (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
// Token refresh
|
||||
export async function refreshTokens(
|
||||
refreshToken: string,
|
||||
): Promise<{ access_token: string; refresh_token: string }> {
|
||||
const res = await fetch(`${API_BASE}/v1/tokens/refresh`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ refresh_token: refreshToken }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
throw new Error(`Token refresh failed (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
// Authenticated fetch wrapper with automatic token refresh
|
||||
let isRefreshing = false;
|
||||
let refreshPromise: Promise<string> | null = null;
|
||||
|
||||
async function getValidAccessToken(): Promise<string> {
|
||||
const accessToken = getAccessToken();
|
||||
if (!accessToken) {
|
||||
throw new Error("Not authenticated");
|
||||
}
|
||||
return accessToken;
|
||||
}
|
||||
|
||||
/**
 * Refresh the access token using the stored refresh token.
 *
 * Single-flight: concurrent callers join the in-flight refresh via the
 * module-level `isRefreshing`/`refreshPromise` pair instead of firing
 * duplicate refresh requests.
 *
 * On a missing refresh token, or when the refresh request fails, local
 * tokens are cleared before throwing — the session is considered over.
 */
async function handleTokenRefresh(): Promise<string> {
  // Someone else is already refreshing — await their result.
  if (isRefreshing && refreshPromise) {
    return refreshPromise;
  }

  const refreshToken = getRefreshToken();
  if (!refreshToken) {
    clearTokens();
    throw new Error("No refresh token available");
  }

  isRefreshing = true;
  refreshPromise = (async () => {
    try {
      const tokens = await refreshTokens(refreshToken);
      storeTokens(tokens.access_token, tokens.refresh_token);
      return tokens.access_token;
    } catch {
      // Any refresh failure invalidates the local session.
      clearTokens();
      throw new Error("Session expired");
    } finally {
      // Reset the latch whether the refresh succeeded or failed.
      isRefreshing = false;
      refreshPromise = null;
    }
  })();

  return refreshPromise;
}
|
||||
|
||||
export async function authenticatedFetch(
|
||||
url: string,
|
||||
options: RequestInit = {},
|
||||
): Promise<Response> {
|
||||
const accessToken = await getValidAccessToken();
|
||||
|
||||
const res = await fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
...options.headers,
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (res.status === 401) {
|
||||
// Try to refresh the token
|
||||
const newAccessToken = await handleTokenRefresh();
|
||||
return fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
...options.headers,
|
||||
Authorization: `Bearer ${newAccessToken}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// Profile APIs
|
||||
export async function getProfile(): Promise<ProfileResponse> {
|
||||
const res = await authenticatedFetch(`${API_BASE}/v1/profile`);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch profile (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function logout(): Promise<void> {
|
||||
try {
|
||||
await authenticatedFetch(`${API_BASE}/v1/oauth/logout`, {
|
||||
method: "POST",
|
||||
});
|
||||
} finally {
|
||||
clearTokens();
|
||||
}
|
||||
}
|
||||
|
||||
// Organization APIs
|
||||
export async function listOrganizations(): Promise<OrganizationWithRole[]> {
|
||||
const res = await authenticatedFetch(`${API_BASE}/v1/organizations`);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch organizations (${res.status})`);
|
||||
}
|
||||
const data = await res.json();
|
||||
return data.organizations;
|
||||
}
|
||||
|
||||
export async function createOrganization(
|
||||
data: CreateOrganizationRequest,
|
||||
): Promise<OrganizationWithRole> {
|
||||
const res = await authenticatedFetch(`${API_BASE}/v1/organizations`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to create organization (${res.status})`);
|
||||
}
|
||||
const result = await res.json();
|
||||
return result.organization;
|
||||
}
|
||||
|
||||
export async function getOrganization(
|
||||
orgId: string,
|
||||
): Promise<GetOrganizationResponse> {
|
||||
const res = await authenticatedFetch(`${API_BASE}/v1/organizations/${orgId}`);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch organization (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function updateOrganization(
|
||||
orgId: string,
|
||||
name: string,
|
||||
): Promise<Organization> {
|
||||
const res = await authenticatedFetch(`${API_BASE}/v1/organizations/${orgId}`, {
|
||||
method: "PATCH",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ name }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to update organization (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function deleteOrganization(orgId: string): Promise<void> {
|
||||
const res = await authenticatedFetch(`${API_BASE}/v1/organizations/${orgId}`, {
|
||||
method: "DELETE",
|
||||
});
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to delete organization (${res.status})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Organization Members APIs
|
||||
export async function listMembers(
|
||||
orgId: string,
|
||||
): Promise<OrganizationMemberWithProfile[]> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/members`,
|
||||
);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch members (${res.status})`);
|
||||
}
|
||||
const data = await res.json();
|
||||
return data.members;
|
||||
}
|
||||
|
||||
export async function removeMember(
|
||||
orgId: string,
|
||||
userId: string,
|
||||
): Promise<void> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/members/${userId}`,
|
||||
{ method: "DELETE" },
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to remove member (${res.status})`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function updateMemberRole(
|
||||
orgId: string,
|
||||
userId: string,
|
||||
role: MemberRole,
|
||||
): Promise<void> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/members/${userId}/role`,
|
||||
{
|
||||
method: "PATCH",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ role }),
|
||||
},
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to update member role (${res.status})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Invitation APIs
|
||||
export async function listInvitations(
|
||||
orgId: string,
|
||||
): Promise<OrganizationInvitation[]> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/invitations`,
|
||||
);
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch invitations (${res.status})`);
|
||||
}
|
||||
const data = await res.json();
|
||||
return data.invitations;
|
||||
}
|
||||
|
||||
export async function createInvitation(
|
||||
orgId: string,
|
||||
email: string,
|
||||
role: MemberRole,
|
||||
): Promise<OrganizationInvitation> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/invitations`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ email, role }),
|
||||
},
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to create invitation (${res.status})`);
|
||||
}
|
||||
const data = await res.json();
|
||||
return data.invitation;
|
||||
}
|
||||
|
||||
export async function revokeInvitation(
|
||||
orgId: string,
|
||||
invitationId: string,
|
||||
): Promise<void> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/invitations/revoke`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ invitation_id: invitationId }),
|
||||
},
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.message || `Failed to revoke invitation (${res.status})`);
|
||||
}
|
||||
}
|
||||
|
||||
// GitHub App Integration Types

/** A GitHub App installation attached to an organization. */
export type GitHubAppInstallation = {
  id: string;
  github_installation_id: number; // installation id on the GitHub side
  github_account_login: string; // login of the account the app is installed on
  github_account_type: "Organization" | "User";
  repository_selection: "all" | "selected"; // whether all repos or a subset were granted
  suspended_at: string | null; // null unless the installation is suspended
  created_at: string;
};

/** A repository accessible through the GitHub App installation. */
export type GitHubAppRepository = {
  id: string;
  github_repo_id: number; // repository id on the GitHub side
  repo_full_name: string; // presumably "owner/name" — confirm against backend
};

/** Aggregate GitHub App state for an organization. */
export type GitHubAppStatus = {
  installed: boolean;
  installation: GitHubAppInstallation | null; // null when not installed
  repositories: GitHubAppRepository[];
};

/** Response of the install-url endpoint. */
export type GitHubAppInstallUrlResponse = {
  install_url: string; // URL the user visits to install the GitHub App
};
|
||||
|
||||
// GitHub App Integration APIs
|
||||
export async function getGitHubAppInstallUrl(
|
||||
orgId: string,
|
||||
): Promise<GitHubAppInstallUrlResponse> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/github-app/install-url`,
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.error || `Failed to get install URL (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function getGitHubAppStatus(orgId: string): Promise<GitHubAppStatus> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/github-app/status`,
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.error || `Failed to get GitHub App status (${res.status})`);
|
||||
}
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function disconnectGitHubApp(orgId: string): Promise<void> {
|
||||
const res = await authenticatedFetch(
|
||||
`${API_BASE}/v1/organizations/${orgId}/github-app`,
|
||||
{ method: "DELETE" },
|
||||
);
|
||||
if (!res.ok) {
|
||||
const error = await res.json().catch(() => ({}));
|
||||
throw new Error(error.error || `Failed to disconnect GitHub App (${res.status})`);
|
||||
}
|
||||
}
|
||||
|
||||
26
remote-frontend/src/auth.ts
Normal file
26
remote-frontend/src/auth.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
// Auth token storage utilities using localStorage for persistent sessions
|
||||
|
||||
const ACCESS_TOKEN_KEY = "rf_access_token";
|
||||
const REFRESH_TOKEN_KEY = "rf_refresh_token";
|
||||
|
||||
export function storeTokens(accessToken: string, refreshToken: string): void {
|
||||
localStorage.setItem(ACCESS_TOKEN_KEY, accessToken);
|
||||
localStorage.setItem(REFRESH_TOKEN_KEY, refreshToken);
|
||||
}
|
||||
|
||||
export function getAccessToken(): string | null {
|
||||
return localStorage.getItem(ACCESS_TOKEN_KEY);
|
||||
}
|
||||
|
||||
export function getRefreshToken(): string | null {
|
||||
return localStorage.getItem(REFRESH_TOKEN_KEY);
|
||||
}
|
||||
|
||||
export function clearTokens(): void {
|
||||
localStorage.removeItem(ACCESS_TOKEN_KEY);
|
||||
localStorage.removeItem(REFRESH_TOKEN_KEY);
|
||||
}
|
||||
|
||||
export function isLoggedIn(): boolean {
|
||||
return getAccessToken() !== null && getRefreshToken() !== null;
|
||||
}
|
||||
299
remote-frontend/src/components/CodeFragmentCard.tsx
Normal file
299
remote-frontend/src/components/CodeFragmentCard.tsx
Normal file
@@ -0,0 +1,299 @@
|
||||
import { useMemo, useState } from "react";
|
||||
import hljs from "highlight.js/lib/core";
|
||||
import javascript from "highlight.js/lib/languages/javascript";
|
||||
import typescript from "highlight.js/lib/languages/typescript";
|
||||
import python from "highlight.js/lib/languages/python";
|
||||
import rust from "highlight.js/lib/languages/rust";
|
||||
import go from "highlight.js/lib/languages/go";
|
||||
import java from "highlight.js/lib/languages/java";
|
||||
import css from "highlight.js/lib/languages/css";
|
||||
import json from "highlight.js/lib/languages/json";
|
||||
import xml from "highlight.js/lib/languages/xml";
|
||||
import bash from "highlight.js/lib/languages/bash";
|
||||
import sql from "highlight.js/lib/languages/sql";
|
||||
import yaml from "highlight.js/lib/languages/yaml";
|
||||
import markdown from "highlight.js/lib/languages/markdown";
|
||||
import cpp from "highlight.js/lib/languages/cpp";
|
||||
import csharp from "highlight.js/lib/languages/csharp";
|
||||
import ruby from "highlight.js/lib/languages/ruby";
|
||||
import swift from "highlight.js/lib/languages/swift";
|
||||
import kotlin from "highlight.js/lib/languages/kotlin";
|
||||
import type { CodeFragment } from "../types/review";
|
||||
|
||||
// Register languages — highlight.js/lib/core ships with no grammars, so
// every language we highlight must be registered explicitly.
hljs.registerLanguage("javascript", javascript);
hljs.registerLanguage("typescript", typescript);
hljs.registerLanguage("python", python);
hljs.registerLanguage("rust", rust);
hljs.registerLanguage("go", go);
hljs.registerLanguage("java", java);
hljs.registerLanguage("css", css);
hljs.registerLanguage("json", json);
hljs.registerLanguage("xml", xml);
hljs.registerLanguage("bash", bash);
hljs.registerLanguage("sql", sql);
hljs.registerLanguage("yaml", yaml);
hljs.registerLanguage("markdown", markdown);
hljs.registerLanguage("cpp", cpp);
hljs.registerLanguage("csharp", csharp);
hljs.registerLanguage("ruby", ruby);
hljs.registerLanguage("swift", swift);
hljs.registerLanguage("kotlin", kotlin);

// Aliases — shorthand ids mapped onto the grammars registered above.
hljs.registerLanguage("js", javascript);
hljs.registerLanguage("ts", typescript);
hljs.registerLanguage("tsx", typescript);
hljs.registerLanguage("jsx", javascript);
hljs.registerLanguage("py", python);
hljs.registerLanguage("rs", rust);
hljs.registerLanguage("rb", ruby);
hljs.registerLanguage("sh", bash);
hljs.registerLanguage("html", xml); // highlight.js treats HTML as XML
hljs.registerLanguage("htm", xml);
hljs.registerLanguage("yml", yaml);
hljs.registerLanguage("cs", csharp);
hljs.registerLanguage("kt", kotlin);
|
||||
|
||||
const extToLang: Record<string, string> = {
|
||||
js: "javascript",
|
||||
mjs: "javascript",
|
||||
cjs: "javascript",
|
||||
ts: "typescript",
|
||||
tsx: "typescript",
|
||||
jsx: "javascript",
|
||||
py: "python",
|
||||
rs: "rust",
|
||||
go: "go",
|
||||
java: "java",
|
||||
css: "css",
|
||||
json: "json",
|
||||
html: "xml",
|
||||
htm: "xml",
|
||||
xml: "xml",
|
||||
sh: "bash",
|
||||
bash: "bash",
|
||||
sql: "sql",
|
||||
yml: "yaml",
|
||||
yaml: "yaml",
|
||||
md: "markdown",
|
||||
cpp: "cpp",
|
||||
cc: "cpp",
|
||||
c: "cpp",
|
||||
h: "cpp",
|
||||
cs: "csharp",
|
||||
rb: "ruby",
|
||||
swift: "swift",
|
||||
kt: "kotlin",
|
||||
};
|
||||
|
||||
function getLanguageFromPath(path: string): string {
|
||||
const ext = path.split(".").pop()?.toLowerCase() || "";
|
||||
return extToLang[ext] || "plaintext";
|
||||
}
|
||||
|
||||
// Whether the card shows only the referenced lines or the whole file.
type ViewMode = "fragment" | "file";

interface CodeFragmentCardProps {
  fragment: CodeFragment; // file path, 1-based line range, optional message
  fileContent?: string; // full text of the file, when available
  isLoading?: boolean; // true while the file content is being fetched
  unchangedRegion?: boolean; // show the "Unchanged" badge in the header
  hideHeader?: boolean; // suppress the path/line-range header row
}

/**
 * Card rendering a syntax-highlighted fragment of a source file.
 *
 * Shows an optional header (path, line range, badges, message) and a
 * toggle between fragment-only and whole-file views; in whole-file view
 * the fragment's lines are tinted. Highlighted HTML comes from
 * `getHighlightedLines` below and is injected via
 * `dangerouslySetInnerHTML`.
 */
export function CodeFragmentCard({
  fragment,
  fileContent,
  isLoading,
  unchangedRegion,
  hideHeader,
}: CodeFragmentCardProps) {
  const { file, start_line, end_line, message } = fragment;
  const [viewMode, setViewMode] = useState<ViewMode>("fragment");
  const lang = getLanguageFromPath(file);

  // Re-highlight only when the content, range, language or view changes.
  const highlightedLines = useMemo(() => {
    if (!fileContent) return null;

    if (viewMode === "fragment") {
      return getHighlightedLines(fileContent, start_line, end_line, lang);
    } else {
      // Full file view
      const allLines = fileContent.split(/\r?\n/);
      return getHighlightedLines(fileContent, 1, allLines.length, lang);
    }
  }, [fileContent, start_line, end_line, lang, viewMode]);

  // True when a (1-based) line number lies inside the fragment range.
  const isInFragment = (lineNumber: number) =>
    lineNumber >= start_line && lineNumber <= end_line;

  return (
    <div
      className={hideHeader ? "" : "border rounded bg-muted/40 overflow-hidden"}
    >
      {/* Header */}
      {!hideHeader && (
        <div className="px-3 py-2 border-b bg-muted/60">
          <div className="flex items-center gap-2">
            <div className="flex items-center gap-2 text-xs text-muted-foreground min-w-0">
              <span className="font-mono truncate">{file}</span>
              <span className="shrink-0">
                Lines {start_line}
                {end_line !== start_line && `–${end_line}`}
              </span>
              {unchangedRegion && (
                <span className="shrink-0 px-1.5 py-0.5 rounded text-[10px] bg-muted text-muted-foreground">
                  Unchanged
                </span>
              )}
            </div>
            <div className="flex items-center gap-1 shrink-0 ml-auto">
              {/* Expand/collapse toggle — only offered when the file text exists */}
              {fileContent && (
                <button
                  className="h-6 px-2 rounded hover:bg-muted transition-colors flex items-center justify-center"
                  onClick={() =>
                    setViewMode((prev) =>
                      prev === "fragment" ? "file" : "fragment",
                    )
                  }
                  title={
                    viewMode === "fragment"
                      ? "View full file"
                      : "View fragment only"
                  }
                >
                  {viewMode === "fragment" ? (
                    <svg
                      className="h-3 w-3"
                      fill="none"
                      stroke="currentColor"
                      viewBox="0 0 24 24"
                    >
                      <path
                        strokeLinecap="round"
                        strokeLinejoin="round"
                        strokeWidth={2}
                        d="M4 8V4m0 0h4M4 4l5 5m11-1V4m0 0h-4m4 0l-5 5M4 16v4m0 0h4m-4 0l5-5m11 5l-5-5m5 5v-4m0 4h-4"
                      />
                    </svg>
                  ) : (
                    <svg
                      className="h-3 w-3"
                      fill="none"
                      stroke="currentColor"
                      viewBox="0 0 24 24"
                    >
                      <path
                        strokeLinecap="round"
                        strokeLinejoin="round"
                        strokeWidth={2}
                        d="M9 9V4.5M9 9H4.5M9 9L3.75 3.75M9 15v4.5M9 15H4.5M9 15l-5.25 5.25M15 9h4.5M15 9V4.5M15 9l5.25-5.25M15 15h4.5M15 15v4.5m0-4.5l5.25 5.25"
                      />
                    </svg>
                  )}
                </button>
              )}
            </div>
          </div>
          {message && (
            <div className="flex items-start gap-1.5 text-xs text-amber-600 dark:text-amber-400 mt-1.5 italic">
              <svg
                className="h-3.5 w-3.5 shrink-0 mt-0.5"
                fill="none"
                stroke="currentColor"
                viewBox="0 0 24 24"
              >
                <path
                  strokeLinecap="round"
                  strokeLinejoin="round"
                  strokeWidth={2}
                  d="M7 8h10M7 12h4m1 8l-4-4H5a2 2 0 01-2-2V6a2 2 0 012-2h14a2 2 0 012 2v8a2 2 0 01-2 2h-3l-4 4z"
                />
              </svg>
              <span>{message}</span>
            </div>
          )}
        </div>
      )}

      {/* Code Content */}
      {isLoading ? (
        <div className="px-3 py-4 flex items-center justify-center">
          <div className="animate-spin rounded-full h-4 w-4 border-b-2 border-muted-foreground/60"></div>
          <span className="ml-2 text-xs text-muted-foreground">Loading...</span>
        </div>
      ) : highlightedLines ? (
        <div className="overflow-x-auto">
          <table className="w-full text-xs font-mono border-collapse">
            <tbody>
              {highlightedLines.map(({ lineNumber, html }) => (
                <tr
                  key={lineNumber}
                  className={`hover:bg-muted/50 leading-5 ${
                    viewMode === "file" && isInFragment(lineNumber)
                      ? "bg-amber-500/10"
                      : ""
                  }`}
                >
                  <td className="select-none px-3 py-0 text-right text-muted-foreground/60 border-r w-[1%] min-w-[40px] align-top">
                    {lineNumber}
                  </td>
                  <td
                    className="px-3 py-0 whitespace-pre"
                    dangerouslySetInnerHTML={{ __html: html || " " }}
                  />
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      ) : (
        <div className="px-3 py-4 text-xs text-muted-foreground">
          File content unavailable for this fragment.
        </div>
      )}
    </div>
  );
}
|
||||
|
||||
function getHighlightedLines(
|
||||
content: string,
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
lang: string,
|
||||
): { lineNumber: number; html: string }[] {
|
||||
const allLines = content.split(/\r?\n/);
|
||||
const s = Math.max(1, startLine);
|
||||
const e = Math.min(allLines.length, endLine);
|
||||
const result: { lineNumber: number; html: string }[] = [];
|
||||
|
||||
for (let i = s; i <= e; i++) {
|
||||
const line = allLines[i - 1] || "";
|
||||
let html: string;
|
||||
|
||||
try {
|
||||
if (lang !== "plaintext" && hljs.getLanguage(lang)) {
|
||||
html = hljs.highlight(line, {
|
||||
language: lang,
|
||||
ignoreIllegals: true,
|
||||
}).value;
|
||||
} else {
|
||||
html = escapeHtml(line);
|
||||
}
|
||||
} catch {
|
||||
html = escapeHtml(line);
|
||||
}
|
||||
|
||||
result.push({ lineNumber: i, html });
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function escapeHtml(text: string): string {
|
||||
return text
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">");
|
||||
}
|
||||
84
remote-frontend/src/components/MarkdownRenderer.tsx
Normal file
84
remote-frontend/src/components/MarkdownRenderer.tsx
Normal file
@@ -0,0 +1,84 @@
|
||||
interface MarkdownRendererProps {
  content: string; // raw markdown source
  className?: string; // extra classes appended to the prose wrapper
}

/**
 * Render a markdown string as styled HTML.
 *
 * The markup is produced by the local `parseMarkdown` helper (which is
 * relied on to HTML-escape the input before generating tags) and is
 * injected via `dangerouslySetInnerHTML`.
 */
export function MarkdownRenderer({
  content,
  className = "",
}: MarkdownRendererProps) {
  return (
    <div
      className={`prose prose-sm max-w-none [&>*:first-child]:mt-0 ${className}`}
      dangerouslySetInnerHTML={{ __html: parseMarkdown(content) }}
    />
  );
}
|
||||
|
||||
function parseMarkdown(text: string): string {
|
||||
if (!text) return "";
|
||||
|
||||
let html = text
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">");
|
||||
|
||||
// Code blocks (must be before inline code)
|
||||
html = html.replace(/```(\w*)\n([\s\S]*?)```/g, (_, _lang, code) => {
|
||||
return `<pre class="bg-secondary rounded-md px-3 py-2 my-2 overflow-x-auto"><code class="text-xs font-mono">${code.trim()}</code></pre>`;
|
||||
});
|
||||
|
||||
// Headings
|
||||
html = html.replace(
|
||||
/^### (.+)$/gm,
|
||||
'<h3 class="text-base font-semibold mt-3 mb-2">$1</h3>',
|
||||
);
|
||||
html = html.replace(
|
||||
/^## (.+)$/gm,
|
||||
'<h2 class="text-lg font-semibold mt-3 mb-2">$1</h2>',
|
||||
);
|
||||
html = html.replace(
|
||||
/^# (.+)$/gm,
|
||||
'<h1 class="text-xl font-bold mt-3 mb-2">$1</h1>',
|
||||
);
|
||||
|
||||
// Bold and italic
|
||||
html = html.replace(
|
||||
/\*\*(.+?)\*\*/g,
|
||||
'<strong class="font-semibold">$1</strong>',
|
||||
);
|
||||
html = html.replace(/\*(.+?)\*/g, '<em class="italic">$1</em>');
|
||||
|
||||
// Inline code
|
||||
html = html.replace(
|
||||
/`([^`]+)`/g,
|
||||
'<code class="font-mono bg-muted px-1 py-0.5 rounded text-xs">$1</code>',
|
||||
);
|
||||
|
||||
// Links
|
||||
html = html.replace(
|
||||
/\[([^\]]+)\]\(([^)]+)\)/g,
|
||||
'<a href="$2" class="text-blue-600 underline hover:text-blue-800" target="_blank" rel="noopener">$1</a>',
|
||||
);
|
||||
|
||||
// Lists
|
||||
html = html.replace(/^- (.+)$/gm, '<li class="ml-4">$1</li>');
|
||||
html = html.replace(
|
||||
/(<li[^>]*>.*<\/li>\n?)+/g,
|
||||
'<ul class="list-disc my-2">$&</ul>',
|
||||
);
|
||||
|
||||
// Paragraphs - wrap lines that aren't already wrapped in tags
|
||||
html = html.replace(
|
||||
/^(?!<[huplo]|<li|<pre)(.+)$/gm,
|
||||
'<p class="mb-2 last:mb-0">$1</p>',
|
||||
);
|
||||
|
||||
// Clean up empty paragraphs
|
||||
html = html.replace(/<p class="mb-2 last:mb-0"><\/p>/g, "");
|
||||
|
||||
// Line breaks within paragraphs
|
||||
html = html.replace(/\n(?!<)/g, "<br>");
|
||||
|
||||
return html;
|
||||
}
|
||||
@@ -1,3 +1,100 @@
|
||||
@import url("https://fonts.googleapis.com/css2?family=Chivo+Mono:ital,wght@0,100..900;1,100..900&family=Inter:wght@400;500;600;700&family=JetBrains+Mono:wght@400;500;600&family=Noto+Emoji:wght@300..700&display=swap");
|
||||
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@layer base {
|
||||
:root {
|
||||
color-scheme: dark;
|
||||
--background: 0 0% 12%;
|
||||
/* #1E1E1E */
|
||||
--foreground: 0 0% 90%;
|
||||
--primary: 0 0% 13%;
|
||||
/* #212121 */
|
||||
--primary-foreground: 0 0% 90%;
|
||||
--secondary: 0 0% 13%;
|
||||
/* #212121 */
|
||||
--secondary-foreground: 0 0% 70%;
|
||||
--muted: 0 0% 19%;
/* #303030 */
|
||||
--muted-foreground: 0 0% 65%;
|
||||
--accent: 0 0% 16%;
|
||||
/* #292929 */
|
||||
--accent-foreground: 0 0% 90%;
|
||||
--destructive: 0 45% 55%;
|
||||
--destructive-foreground: 0 0% 90%;
|
||||
--border: 0 0% 16%;
|
||||
/* #292929 */
|
||||
--input: 0 0% 16%;
|
||||
/* #292929 */
|
||||
--ring: 0 0% 50%;
|
||||
--radius: 0.5rem;
|
||||
|
||||
/* Syntax highlighting (dark) */
|
||||
--syntax-keyword: #ff7b72;
|
||||
--syntax-function: #d2a8ff;
|
||||
--syntax-constant: #79c0ff;
|
||||
--syntax-string: #a5d6ff;
|
||||
--syntax-variable: #ffa657;
|
||||
--syntax-comment: #8b949e;
|
||||
--syntax-tag: #7ee787;
|
||||
--syntax-punctuation: #c9d1d9;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border;
|
||||
}
|
||||
|
||||
html,
|
||||
body,
|
||||
#root {
|
||||
@apply min-h-screen;
|
||||
}
|
||||
|
||||
body {
|
||||
@apply bg-background text-foreground font-sans;
|
||||
}
|
||||
}
|
||||
|
||||
/* Syntax highlighting classes */
|
||||
.hljs-keyword,
|
||||
.hljs-type {
|
||||
color: var(--syntax-keyword);
|
||||
}
|
||||
|
||||
.hljs-title,
|
||||
.hljs-title.function_ {
|
||||
color: var(--syntax-function);
|
||||
}
|
||||
|
||||
.hljs-attr,
|
||||
.hljs-number,
|
||||
.hljs-literal {
|
||||
color: var(--syntax-constant);
|
||||
}
|
||||
|
||||
.hljs-string {
|
||||
color: var(--syntax-string);
|
||||
}
|
||||
|
||||
.hljs-built_in,
|
||||
.hljs-symbol {
|
||||
color: var(--syntax-variable);
|
||||
}
|
||||
|
||||
.hljs-comment {
|
||||
color: var(--syntax-comment);
|
||||
}
|
||||
|
||||
.hljs-name,
|
||||
.hljs-tag {
|
||||
color: var(--syntax-tag);
|
||||
}
|
||||
|
||||
.hljs-punctuation,
|
||||
.hljs-property {
|
||||
color: var(--syntax-punctuation);
|
||||
}
|
||||
|
||||
281
remote-frontend/src/lib/diff-parser.ts
Normal file
281
remote-frontend/src/lib/diff-parser.ts
Normal file
@@ -0,0 +1,281 @@
|
||||
/** One `@@` hunk of a unified diff. */
export interface ParsedHunk {
  header: string; // the raw `@@ -a,b +c,d @@ ...` line
  oldStart: number; // first line number on the old side
  oldLines: number; // old-side line count (1 when omitted in the header)
  newStart: number; // first line number on the new side
  newLines: number; // new-side line count (1 when omitted in the header)
  lines: string[]; // body lines, each prefixed with '+', '-' or ' '
}

/** All hunks belonging to one file of a multi-file diff. */
export interface ParsedFileDiff {
  oldPath: string; // path with the leading `a/` stripped
  newPath: string; // path with the leading `b/` stripped
  hunks: ParsedHunk[];
  rawDiff: string; // this file's verbatim slice of the original diff text
}
|
||||
|
||||
export function parseUnifiedDiff(diffText: string): ParsedFileDiff[] {
|
||||
const files: ParsedFileDiff[] = [];
|
||||
const lines = diffText.split("\n");
|
||||
|
||||
let currentFile: ParsedFileDiff | null = null;
|
||||
let currentHunk: ParsedHunk | null = null;
|
||||
let fileStartIdx = 0;
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
|
||||
if (line.startsWith("diff --git")) {
|
||||
if (currentHunk && currentFile) currentFile.hunks.push(currentHunk);
|
||||
if (currentFile) {
|
||||
currentFile.rawDiff = lines.slice(fileStartIdx, i).join("\n");
|
||||
files.push(currentFile);
|
||||
}
|
||||
currentFile = { oldPath: "", newPath: "", hunks: [], rawDiff: "" };
|
||||
currentHunk = null;
|
||||
fileStartIdx = i;
|
||||
} else if (line.startsWith("--- ")) {
|
||||
if (currentFile) {
|
||||
currentFile.oldPath = line.slice(4).replace(/^a\//, "");
|
||||
}
|
||||
} else if (line.startsWith("+++ ")) {
|
||||
if (currentFile) {
|
||||
currentFile.newPath = line.slice(4).replace(/^b\//, "");
|
||||
}
|
||||
} else if (line.startsWith("@@")) {
|
||||
const match = line.match(/@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@(.*)?/);
|
||||
if (match && currentFile) {
|
||||
if (currentHunk) currentFile.hunks.push(currentHunk);
|
||||
currentHunk = {
|
||||
header: line,
|
||||
oldStart: parseInt(match[1], 10),
|
||||
oldLines: match[2] ? parseInt(match[2], 10) : 1,
|
||||
newStart: parseInt(match[3], 10),
|
||||
newLines: match[4] ? parseInt(match[4], 10) : 1,
|
||||
lines: [],
|
||||
};
|
||||
}
|
||||
} else if (
|
||||
currentHunk &&
|
||||
(line.startsWith("+") || line.startsWith("-") || line.startsWith(" "))
|
||||
) {
|
||||
currentHunk.lines.push(line);
|
||||
}
|
||||
}
|
||||
|
||||
if (currentHunk && currentFile) currentFile.hunks.push(currentHunk);
|
||||
if (currentFile) {
|
||||
currentFile.rawDiff = lines.slice(fileStartIdx).join("\n");
|
||||
files.push(currentFile);
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
export function getFileDiff(
|
||||
parsedDiffs: ParsedFileDiff[],
|
||||
filePath: string,
|
||||
): ParsedFileDiff | undefined {
|
||||
return parsedDiffs.find(
|
||||
(f) => f.newPath === filePath || f.oldPath === filePath,
|
||||
);
|
||||
}
|
||||
|
||||
export function hunkOverlapsRange(
|
||||
hunk: ParsedHunk,
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
): boolean {
|
||||
const hunkEnd = hunk.newStart + hunk.newLines - 1;
|
||||
return hunk.newStart <= endLine && hunkEnd >= startLine;
|
||||
}
|
||||
|
||||
export function filterHunksToRange(
|
||||
fileDiff: ParsedFileDiff,
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
contextLines: number = 3,
|
||||
): string {
|
||||
const expandedStart = Math.max(1, startLine - contextLines);
|
||||
const expandedEnd = endLine + contextLines;
|
||||
|
||||
const relevantHunks = fileDiff.hunks.filter((h) =>
|
||||
hunkOverlapsRange(h, expandedStart, expandedEnd),
|
||||
);
|
||||
|
||||
if (relevantHunks.length === 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
const diffLines: string[] = [];
|
||||
diffLines.push(`--- a/${fileDiff.oldPath}`);
|
||||
diffLines.push(`+++ b/${fileDiff.newPath}`);
|
||||
|
||||
for (const hunk of relevantHunks) {
|
||||
diffLines.push(hunk.header);
|
||||
diffLines.push(...hunk.lines);
|
||||
}
|
||||
|
||||
return diffLines.join("\n");
|
||||
}
|
||||
|
||||
export function buildFullFileDiff(fileDiff: ParsedFileDiff): string {
|
||||
if (fileDiff.hunks.length === 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
const diffLines: string[] = [];
|
||||
diffLines.push(`--- a/${fileDiff.oldPath}`);
|
||||
diffLines.push(`+++ b/${fileDiff.newPath}`);
|
||||
|
||||
for (const hunk of fileDiff.hunks) {
|
||||
diffLines.push(hunk.header);
|
||||
diffLines.push(...hunk.lines);
|
||||
}
|
||||
|
||||
return diffLines.join("\n") + "\n";
|
||||
}
|
||||
|
||||
export interface HunkLineInfo {
|
||||
newLineNumber: number | null;
|
||||
oldLineNumber: number | null;
|
||||
type: "add" | "delete" | "context";
|
||||
content: string;
|
||||
}
|
||||
|
||||
export function parseHunkLines(hunk: ParsedHunk): HunkLineInfo[] {
|
||||
const result: HunkLineInfo[] = [];
|
||||
let oldLine = hunk.oldStart;
|
||||
let newLine = hunk.newStart;
|
||||
|
||||
for (const line of hunk.lines) {
|
||||
const prefix = line[0];
|
||||
const content = line.slice(1);
|
||||
|
||||
if (prefix === "+") {
|
||||
result.push({
|
||||
newLineNumber: newLine,
|
||||
oldLineNumber: null,
|
||||
type: "add",
|
||||
content,
|
||||
});
|
||||
newLine++;
|
||||
} else if (prefix === "-") {
|
||||
result.push({
|
||||
newLineNumber: null,
|
||||
oldLineNumber: oldLine,
|
||||
type: "delete",
|
||||
content,
|
||||
});
|
||||
oldLine++;
|
||||
} else {
|
||||
result.push({
|
||||
newLineNumber: newLine,
|
||||
oldLineNumber: oldLine,
|
||||
type: "context",
|
||||
content,
|
||||
});
|
||||
oldLine++;
|
||||
newLine++;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Synthesize a single-hunk unified diff covering the fragment
 * [startLine, endLine] (new-file coordinates) plus `contextLines` of
 * context, using `newFileContent` for line text and the parsed hunks to
 * decide which lines are additions and where deletions sit.
 *
 * Returns "" when the expanded range is empty.
 */
export function synthesizeFragmentDiff(
  fileDiff: ParsedFileDiff,
  newFileContent: string,
  startLine: number,
  endLine: number,
  contextLines: number = 3,
): string {
  const newFileLines = newFileContent.split(/\r?\n/);
  const expandedStart = Math.max(1, startLine - contextLines);
  const expandedEnd = Math.min(newFileLines.length, endLine + contextLines);

  const relevantHunks = fileDiff.hunks.filter((h) =>
    hunkOverlapsRange(h, expandedStart, expandedEnd),
  );

  // For every new-file line in range: is it an addition, and which
  // deleted lines (old-side text) should be printed just before it.
  const changeMap = new Map<
    number,
    { type: "add" | "context"; deletionsBefore: string[] }
  >();

  for (let i = expandedStart; i <= expandedEnd; i++) {
    changeMap.set(i, { type: "context", deletionsBefore: [] });
  }

  for (const hunk of relevantHunks) {
    const lines = parseHunkLines(hunk);
    // Deletions have no new-side line number; buffer them until the next
    // add/context line anchors them to a position in the new file.
    let pendingDeletions: string[] = [];

    for (const line of lines) {
      if (line.type === "delete") {
        pendingDeletions.push(line.content);
      } else {
        // add/context lines always carry a new-side number.
        const newLineNum = line.newLineNumber!;
        if (newLineNum >= expandedStart && newLineNum <= expandedEnd) {
          const existing = changeMap.get(newLineNum)!;
          existing.deletionsBefore.push(...pendingDeletions);
          if (line.type === "add") {
            existing.type = "add";
          }
        }
        // Deletions outside the range are intentionally dropped here.
        pendingDeletions = [];
      }
    }

    // Trailing deletions at the end of a hunk: anchor them to the first
    // new-file line after the hunk, when that line is still in range.
    if (pendingDeletions.length > 0) {
      const lastNewLine = Math.min(
        hunk.newStart + hunk.newLines,
        expandedEnd + 1,
      );
      if (lastNewLine <= expandedEnd) {
        const existing = changeMap.get(lastNewLine);
        if (existing) {
          existing.deletionsBefore.push(...pendingDeletions);
        }
      }
    }
  }

  // Emit the hunk body, counting lines for the @@ header as we go.
  const outputLines: string[] = [];
  let oldLineCount = 0;
  let newLineCount = 0;

  for (let i = expandedStart; i <= expandedEnd; i++) {
    const info = changeMap.get(i)!;
    const lineContent = newFileLines[i - 1] ?? "";

    for (const del of info.deletionsBefore) {
      outputLines.push(`-${del}`);
      oldLineCount++;
    }

    if (info.type === "add") {
      outputLines.push(`+${lineContent}`);
      newLineCount++;
    } else {
      outputLines.push(` ${lineContent}`);
      oldLineCount++;
      newLineCount++;
    }
  }

  // NOTE(review): the old-side start is approximated with the new-side
  // start (expandedStart); exact old coordinates would require mapping
  // through the hunks — confirm consumers only display this header.
  const oldStart = expandedStart;
  const header = `@@ -${oldStart},${oldLineCount} +${expandedStart},${newLineCount} @@`;

  if (outputLines.length === 0) {
    return "";
  }

  const diffLines: string[] = [];
  diffLines.push(`--- a/${fileDiff.oldPath}`);
  diffLines.push(`+++ b/${fileDiff.newPath}`);
  diffLines.push(header);
  diffLines.push(...outputLines);

  return diffLines.join("\n") + "\n";
}
|
||||
72
remote-frontend/src/lib/extToLanguage.ts
Normal file
72
remote-frontend/src/lib/extToLanguage.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* getHighlightLanguage(ext)
|
||||
* Returns the Highlight.js language id (or null if not mapped).
|
||||
*
|
||||
* @param {string} ext – File extension with or without the leading dot.
|
||||
* @example
|
||||
* getHighlightLanguage('.py'); // "python"
|
||||
* getHighlightLanguage('tsx'); // "tsx"
|
||||
*/
|
||||
const extToLang: Record<string, string> = {
|
||||
// Web & scripting
|
||||
js: "javascript",
|
||||
mjs: "javascript",
|
||||
cjs: "javascript",
|
||||
ts: "typescript",
|
||||
jsx: "jsx",
|
||||
tsx: "tsx",
|
||||
html: "xml", // Highlight.js groups HTML/XML
|
||||
htm: "xml",
|
||||
xml: "xml",
|
||||
css: "css",
|
||||
scss: "scss",
|
||||
less: "less",
|
||||
json: "json",
|
||||
md: "markdown",
|
||||
yml: "yaml",
|
||||
yaml: "yaml",
|
||||
sh: "bash",
|
||||
bash: "bash",
|
||||
zsh: "bash",
|
||||
ps1: "powershell",
|
||||
php: "php",
|
||||
|
||||
// Classic compiled
|
||||
c: "c",
|
||||
h: "c",
|
||||
cpp: "cpp",
|
||||
cc: "cpp",
|
||||
cxx: "cpp",
|
||||
hpp: "cpp",
|
||||
cs: "csharp",
|
||||
java: "java",
|
||||
kt: "kotlin",
|
||||
scala: "scala",
|
||||
go: "go",
|
||||
rs: "rust",
|
||||
swift: "swift",
|
||||
dart: "dart",
|
||||
|
||||
// Others & fun stuff
|
||||
py: "python",
|
||||
rb: "ruby",
|
||||
pl: "perl",
|
||||
lua: "lua",
|
||||
r: "r",
|
||||
sql: "sql",
|
||||
tex: "latex",
|
||||
toml: "ini",
|
||||
};
|
||||
|
||||
/**
|
||||
* Normalises the extension and looks it up.
|
||||
*/
|
||||
export function getHighlightLanguage(ext: string): string | null {
|
||||
ext = ext.toLowerCase();
|
||||
return extToLang[ext] ?? null;
|
||||
}
|
||||
|
||||
export function getHighlightLanguageFromPath(path: string): string {
|
||||
const ext = path.split(".").pop() ?? "";
|
||||
return getHighlightLanguage(ext) ?? "plaintext";
|
||||
}
|
||||
42
remote-frontend/src/lib/utils.ts
Normal file
42
remote-frontend/src/lib/utils.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { type ClassValue, clsx } from "clsx";
|
||||
import { twMerge } from "tailwind-merge";
|
||||
|
||||
/**
 * Merge class-name inputs with clsx, then let tailwind-merge resolve
 * conflicting Tailwind utility classes (the later class wins).
 */
export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs));
}
|
||||
|
||||
const extToLang: Record<string, string> = {
|
||||
js: "javascript",
|
||||
mjs: "javascript",
|
||||
cjs: "javascript",
|
||||
ts: "typescript",
|
||||
jsx: "jsx",
|
||||
tsx: "tsx",
|
||||
html: "xml",
|
||||
htm: "xml",
|
||||
xml: "xml",
|
||||
css: "css",
|
||||
scss: "scss",
|
||||
json: "json",
|
||||
md: "markdown",
|
||||
yml: "yaml",
|
||||
yaml: "yaml",
|
||||
sh: "bash",
|
||||
bash: "bash",
|
||||
py: "python",
|
||||
rb: "ruby",
|
||||
go: "go",
|
||||
rs: "rust",
|
||||
java: "java",
|
||||
c: "c",
|
||||
cpp: "cpp",
|
||||
cs: "csharp",
|
||||
swift: "swift",
|
||||
kt: "kotlin",
|
||||
sql: "sql",
|
||||
};
|
||||
|
||||
export function getLanguageFromPath(path: string): string {
|
||||
const ext = path.split(".").pop()?.toLowerCase() || "";
|
||||
return extToLang[ext] || "plaintext";
|
||||
}
|
||||
@@ -1,10 +1,20 @@
|
||||
import React from 'react'
|
||||
import ReactDOM from 'react-dom/client'
|
||||
import AppRouter from './AppRouter.tsx'
|
||||
import './index.css'
|
||||
import React from "react";
|
||||
import ReactDOM from "react-dom/client";
|
||||
import posthog from "posthog-js";
|
||||
import { PostHogProvider } from "posthog-js/react";
|
||||
import AppRouter from "./AppRouter.tsx";
|
||||
import "./index.css";
|
||||
|
||||
ReactDOM.createRoot(document.getElementById('root')!).render(
|
||||
if (import.meta.env.VITE_PUBLIC_POSTHOG_KEY) {
|
||||
posthog.init(import.meta.env.VITE_PUBLIC_POSTHOG_KEY, {
|
||||
api_host: import.meta.env.VITE_PUBLIC_POSTHOG_HOST,
|
||||
});
|
||||
}
|
||||
|
||||
ReactDOM.createRoot(document.getElementById("root")!).render(
|
||||
<React.StrictMode>
|
||||
<AppRouter />
|
||||
<PostHogProvider client={posthog}>
|
||||
<AppRouter />
|
||||
</PostHogProvider>
|
||||
</React.StrictMode>,
|
||||
)
|
||||
);
|
||||
|
||||
149
remote-frontend/src/pages/AccountCompletePage.tsx
Normal file
149
remote-frontend/src/pages/AccountCompletePage.tsx
Normal file
@@ -0,0 +1,149 @@
|
||||
import { useEffect, useMemo, useState } from "react";
|
||||
import { useLocation, useNavigate } from "react-router-dom";
|
||||
import { redeemOAuth } from "../api";
|
||||
import { storeTokens } from "../auth";
|
||||
import { retrieveVerifier, clearVerifier } from "../pkce";
|
||||
|
||||
/**
 * OAuth callback landing page for account login.
 *
 * Reads `handoff_id` / `app_code` (or `error`) from the query string,
 * redeems them together with the stored PKCE verifier for access and
 * refresh tokens, stores the tokens, then redirects to /account after
 * a brief success message.
 */
export default function AccountCompletePage() {
  const navigate = useNavigate();
  const { search } = useLocation();
  const qp = useMemo(() => new URLSearchParams(search), [search]);
  const [error, setError] = useState<string | null>(null);
  const [success, setSuccess] = useState(false);

  const handoffId = qp.get("handoff_id");
  const appCode = qp.get("app_code");
  const oauthError = qp.get("error");

  useEffect(() => {
    // Fix: track unmount and the redirect timer so we neither update
    // state nor navigate after the component has been torn down
    // (the original never cleared the setTimeout).
    let cancelled = false;
    let timer: ReturnType<typeof setTimeout> | undefined;

    const completeLogin = async () => {
      if (oauthError) {
        setError(`OAuth error: ${oauthError}`);
        return;
      }

      // Callback params not present yet; nothing to do.
      if (!handoffId || !appCode) {
        return;
      }

      try {
        const verifier = retrieveVerifier();
        if (!verifier) {
          setError("OAuth session lost. Please try again.");
          return;
        }

        const { access_token, refresh_token } = await redeemOAuth(
          handoffId,
          appCode,
          verifier,
        );
        if (cancelled) return;

        storeTokens(access_token, refresh_token);
        clearVerifier();

        setSuccess(true);

        // Redirect to account page after brief delay
        timer = setTimeout(() => {
          navigate("/account", { replace: true });
        }, 1000);
      } catch (e) {
        if (cancelled) return;
        setError(e instanceof Error ? e.message : "Failed to complete login");
        clearVerifier();
      }
    };

    completeLogin();

    return () => {
      cancelled = true;
      if (timer !== undefined) clearTimeout(timer);
    };
  }, [handoffId, appCode, oauthError, navigate]);

  if (error) {
    return (
      <StatusCard
        title="Login failed"
        body={error}
        isError
        showRetry
        onRetry={() => navigate("/account", { replace: true })}
      />
    );
  }

  if (success) {
    return (
      <StatusCard
        title="Login successful!"
        body="Redirecting to your account..."
        isSuccess
      />
    );
  }

  return (
    <StatusCard title="Completing login..." body="Processing OAuth callback..." />
  );
}
|
||||
|
||||
/**
 * Centered status panel used while finishing the OAuth flow:
 * a colored title, a body line, an optional spinner on success,
 * and an optional retry button on error.
 */
function StatusCard({
  title,
  body,
  isError = false,
  isSuccess = false,
  showRetry = false,
  onRetry,
}: {
  title: string;
  body: string;
  isError?: boolean;
  isSuccess?: boolean;
  showRetry?: boolean;
  onRetry?: () => void;
}) {
  // Resolve the heading colour up front instead of a nested ternary inline.
  let titleColor = "text-gray-900";
  if (isError) {
    titleColor = "text-red-600";
  } else if (isSuccess) {
    titleColor = "text-green-600";
  }

  const spinner = (
    <div className="mt-4 flex items-center text-sm text-gray-500">
      <svg className="animate-spin h-4 w-4 mr-2" viewBox="0 0 24 24">
        <circle
          className="opacity-25"
          cx="12"
          cy="12"
          r="10"
          stroke="currentColor"
          strokeWidth="4"
          fill="none"
        />
        <path
          className="opacity-75"
          fill="currentColor"
          d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
        />
      </svg>
      Redirecting...
    </div>
  );

  return (
    <div className="min-h-screen grid place-items-center bg-gray-50 p-4">
      <div className="max-w-md w-full bg-white shadow rounded-lg p-6">
        <h2 className={`text-lg font-semibold ${titleColor}`}>{title}</h2>
        <p className="text-gray-600 mt-2">{body}</p>
        {isSuccess && spinner}
        {showRetry && onRetry && (
          <button
            onClick={onRetry}
            className="mt-4 w-full py-2 px-4 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors font-medium"
          >
            Try again
          </button>
        )}
      </div>
    </div>
  );
}
|
||||
357
remote-frontend/src/pages/AccountPage.tsx
Normal file
357
remote-frontend/src/pages/AccountPage.tsx
Normal file
@@ -0,0 +1,357 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { Link, useNavigate } from "react-router-dom";
|
||||
import { isLoggedIn } from "../auth";
|
||||
import {
|
||||
initOAuth,
|
||||
getProfile,
|
||||
logout,
|
||||
listOrganizations,
|
||||
createOrganization,
|
||||
type OAuthProvider,
|
||||
type ProfileResponse,
|
||||
type OrganizationWithRole,
|
||||
} from "../api";
|
||||
import {
|
||||
generateVerifier,
|
||||
generateChallenge,
|
||||
storeVerifier,
|
||||
} from "../pkce";
|
||||
|
||||
/**
 * Account dashboard page.
 *
 * Unauthenticated visitors see PKCE OAuth sign-in buttons (GitHub /
 * Google). Authenticated users see their profile, connected providers,
 * the organizations they belong to, and an inline "create organization"
 * form that auto-derives a slug from the name.
 */
export default function AccountPage() {
  const navigate = useNavigate();
  const [loading, setLoading] = useState(true);
  const [authenticated, setAuthenticated] = useState(false);
  const [profile, setProfile] = useState<ProfileResponse | null>(null);
  const [organizations, setOrganizations] = useState<OrganizationWithRole[]>(
    [],
  );
  const [error, setError] = useState<string | null>(null);
  const [oauthLoading, setOauthLoading] = useState(false);

  // Create org form state
  const [showCreateForm, setShowCreateForm] = useState(false);
  const [newOrgName, setNewOrgName] = useState("");
  const [newOrgSlug, setNewOrgSlug] = useState("");
  const [createLoading, setCreateLoading] = useState(false);
  const [createError, setCreateError] = useState<string | null>(null);

  // Load profile + orgs once on mount when a token is present.
  useEffect(() => {
    if (isLoggedIn()) {
      setAuthenticated(true);
      loadData();
    } else {
      setLoading(false);
    }
  }, []);

  /** Fetch profile and organization list in parallel. */
  async function loadData() {
    try {
      const [profileData, orgsData] = await Promise.all([
        getProfile(),
        listOrganizations(),
      ]);
      setProfile(profileData);
      setOrganizations(orgsData);
    } catch (e) {
      // Any failure here (e.g. expired token) drops the user back to sign-in.
      setError(e instanceof Error ? e.message : "Failed to load data");
      setAuthenticated(false);
    } finally {
      setLoading(false);
    }
  }

  /** Start the PKCE OAuth flow and hand control off to the provider. */
  const handleOAuthLogin = async (provider: OAuthProvider) => {
    setOauthLoading(true);
    try {
      const verifier = generateVerifier();
      const challenge = await generateChallenge(verifier);
      storeVerifier(verifier);

      const appBase =
        import.meta.env.VITE_APP_BASE_URL || window.location.origin;
      const returnTo = `${appBase}/account/complete`;

      const result = await initOAuth(provider, returnTo, challenge);
      window.location.assign(result.authorize_url);
    } catch (e) {
      setError(e instanceof Error ? e.message : "OAuth init failed");
      setOauthLoading(false);
    }
  };

  const handleLogout = async () => {
    try {
      await logout();
      setAuthenticated(false);
      setProfile(null);
      setOrganizations([]);
    } catch {
      // Fix: optional catch binding — the error object was never used.
      // Tokens already cleared in logout()
      setAuthenticated(false);
    }
  };

  /** Create an organization, then navigate to its detail page. */
  const handleCreateOrg = async (e: React.FormEvent) => {
    e.preventDefault();
    setCreateLoading(true);
    setCreateError(null);

    try {
      const org = await createOrganization({
        name: newOrgName.trim(),
        slug: newOrgSlug.trim().toLowerCase(),
      });
      navigate(`/account/organizations/${org.id}`);
    } catch (err) {
      setCreateError(err instanceof Error ? err.message : "Failed to create");
    } finally {
      setCreateLoading(false);
    }
  };

  /** Derive a URL-safe slug (lowercase, max 63 chars) from a display name. */
  const generateSlug = (name: string) => {
    return name
      .toLowerCase()
      .replace(/[^a-z0-9]+/g, "-")
      .replace(/^-|-$/g, "")
      .slice(0, 63);
  };

  if (loading) {
    return <LoadingCard text="Loading..." />;
  }

  if (!authenticated) {
    return (
      <div className="min-h-screen flex items-center justify-center bg-gray-50 p-4">
        <div className="w-full max-w-md bg-white shadow rounded-lg p-6 space-y-4">
          <div>
            <h1 className="text-2xl font-bold text-gray-900">Sign In</h1>
            <p className="text-gray-600 mt-1">
              Sign in to manage your account and organizations
            </p>
          </div>

          {error && (
            <div className="bg-red-50 border border-red-200 rounded-lg p-3">
              <p className="text-sm text-red-600">{error}</p>
            </div>
          )}

          <div className="border-t border-gray-200 pt-4 space-y-3">
            <OAuthButton
              label="Continue with GitHub"
              onClick={() => handleOAuthLogin("github")}
              disabled={oauthLoading}
            />
            <OAuthButton
              label="Continue with Google"
              onClick={() => handleOAuthLogin("google")}
              disabled={oauthLoading}
            />
          </div>
        </div>
      </div>
    );
  }

  return (
    <div className="min-h-screen bg-gray-50 p-4">
      <div className="max-w-2xl mx-auto space-y-6">
        {/* Profile Card */}
        <div className="bg-white shadow rounded-lg p-6">
          <div className="flex items-start justify-between">
            <div className="flex items-center space-x-4">
              {profile?.providers[0]?.avatar_url && (
                <img
                  src={profile.providers[0].avatar_url}
                  alt="Avatar"
                  className="w-16 h-16 rounded-full"
                />
              )}
              <div>
                <h1 className="text-xl font-bold text-gray-900">
                  {profile?.providers[0]?.display_name ||
                    profile?.username ||
                    "User"}
                </h1>
                <p className="text-gray-600">{profile?.email}</p>
              </div>
            </div>
            <button
              onClick={handleLogout}
              className="text-sm text-gray-600 hover:text-gray-900"
            >
              Sign out
            </button>
          </div>

          {profile && profile.providers.length > 0 && (
            <div className="mt-4 pt-4 border-t border-gray-200">
              <p className="text-sm text-gray-600 mb-2">Connected accounts:</p>
              <div className="flex flex-wrap gap-2">
                {profile.providers.map((p) => (
                  <span
                    key={p.provider}
                    className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-gray-100 text-gray-800"
                  >
                    {p.provider}
                    {p.username && ` (${p.username})`}
                  </span>
                ))}
              </div>
            </div>
          )}
        </div>

        {/* Organizations Card */}
        <div className="bg-white shadow rounded-lg p-6">
          <div className="flex items-center justify-between mb-4">
            <h2 className="text-lg font-semibold text-gray-900">
              Organizations
            </h2>
            <button
              onClick={() => setShowCreateForm(!showCreateForm)}
              className="text-sm px-3 py-1.5 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors"
            >
              {showCreateForm ? "Cancel" : "New Organization"}
            </button>
          </div>

          {showCreateForm && (
            <form
              onSubmit={handleCreateOrg}
              className="mb-4 p-4 bg-gray-50 rounded-lg space-y-3"
            >
              <div>
                <label className="block text-sm font-medium text-gray-700 mb-1">
                  Name
                </label>
                <input
                  type="text"
                  value={newOrgName}
                  onChange={(e) => {
                    setNewOrgName(e.target.value);
                    // Auto-fill the slug only while the user hasn't edited it
                    // manually (it still matches the slug of the previous name).
                    if (!newOrgSlug || newOrgSlug === generateSlug(newOrgName)) {
                      setNewOrgSlug(generateSlug(e.target.value));
                    }
                  }}
                  placeholder="My Organization"
                  className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent"
                  required
                />
              </div>
              <div>
                <label className="block text-sm font-medium text-gray-700 mb-1">
                  Slug
                </label>
                <input
                  type="text"
                  value={newOrgSlug}
                  onChange={(e) => setNewOrgSlug(e.target.value.toLowerCase())}
                  placeholder="my-organization"
                  pattern="[a-z0-9\-_]+"
                  minLength={3}
                  maxLength={63}
                  className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent"
                  required
                />
                <p className="text-xs text-gray-500 mt-1">
                  Only lowercase letters, numbers, hyphens, and underscores
                </p>
              </div>
              {createError && (
                <p className="text-sm text-red-600">{createError}</p>
              )}
              <button
                type="submit"
                disabled={createLoading}
                className="w-full py-2 px-4 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors font-medium disabled:opacity-50 disabled:cursor-not-allowed"
              >
                {createLoading ? "Creating..." : "Create Organization"}
              </button>
            </form>
          )}

          {organizations.length === 0 ? (
            <p className="text-gray-600 text-sm">
              You don't belong to any organizations yet.
            </p>
          ) : (
            <div className="divide-y divide-gray-200">
              {organizations.map((org) => (
                <Link
                  key={org.id}
                  to={`/account/organizations/${org.id}`}
                  className="flex items-center justify-between py-3 hover:bg-gray-50 -mx-2 px-2 rounded transition-colors"
                >
                  <div>
                    <p className="font-medium text-gray-900">{org.name}</p>
                    <p className="text-sm text-gray-500">@{org.slug}</p>
                  </div>
                  <div className="flex items-center gap-2">
                    {org.is_personal && (
                      <span className="text-xs px-2 py-0.5 bg-blue-100 text-blue-700 rounded">
                        Personal
                      </span>
                    )}
                    <span
                      className={`text-xs px-2 py-0.5 rounded ${
                        org.user_role === "ADMIN"
                          ? "bg-green-100 text-green-700"
                          : "bg-gray-100 text-gray-700"
                      }`}
                    >
                      {org.user_role}
                    </span>
                    <svg
                      className="w-5 h-5 text-gray-400"
                      fill="none"
                      stroke="currentColor"
                      viewBox="0 0 24 24"
                    >
                      <path
                        strokeLinecap="round"
                        strokeLinejoin="round"
                        strokeWidth={2}
                        d="M9 5l7 7-7 7"
                      />
                    </svg>
                  </div>
                </Link>
              ))}
            </div>
          )}
        </div>
      </div>
    </div>
  );
}
|
||||
|
||||
/** Full-width dark call-to-action button for an OAuth provider. */
function OAuthButton(props: {
  label: string;
  onClick: () => void;
  disabled?: boolean;
}) {
  const { label, onClick, disabled } = props;
  return (
    <button
      onClick={onClick}
      disabled={disabled}
      className="w-full py-3 px-4 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors font-medium disabled:opacity-50 disabled:cursor-not-allowed"
    >
      {label}
    </button>
  );
}
|
||||
|
||||
/** Centered full-screen loading placeholder. */
function LoadingCard(props: { text: string }) {
  return (
    <div className="min-h-screen grid place-items-center bg-gray-50">
      <div className="text-gray-600">{props.text}</div>
    </div>
  );
}
|
||||
@@ -18,5 +18,5 @@ export default function HomePage() {
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,97 +1,95 @@
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { useLocation, useParams } from 'react-router-dom'
|
||||
import { redeemOAuth, acceptInvitation } from '../api'
|
||||
import { useEffect, useMemo, useState } from "react";
|
||||
import { useLocation, useParams } from "react-router-dom";
|
||||
import { redeemOAuth, acceptInvitation } from "../api";
|
||||
import {
|
||||
retrieveVerifier,
|
||||
retrieveInvitationToken,
|
||||
clearVerifier,
|
||||
clearInvitationToken,
|
||||
} from '../pkce'
|
||||
} from "../pkce";
|
||||
|
||||
export default function InvitationCompletePage() {
|
||||
const { token: urlToken } = useParams()
|
||||
const { search } = useLocation()
|
||||
const qp = useMemo(() => new URLSearchParams(search), [search])
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [success, setSuccess] = useState(false)
|
||||
const [orgSlug, setOrgSlug] = useState<string | null>(null)
|
||||
const { token: urlToken } = useParams();
|
||||
const { search } = useLocation();
|
||||
const qp = useMemo(() => new URLSearchParams(search), [search]);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [success, setSuccess] = useState(false);
|
||||
const [orgSlug, setOrgSlug] = useState<string | null>(null);
|
||||
|
||||
const handoffId = qp.get('handoff_id')
|
||||
const appCode = qp.get('app_code')
|
||||
const oauthError = qp.get('error')
|
||||
const handoffId = qp.get("handoff_id");
|
||||
const appCode = qp.get("app_code");
|
||||
const oauthError = qp.get("error");
|
||||
|
||||
useEffect(() => {
|
||||
const completeInvitation = async () => {
|
||||
if (oauthError) {
|
||||
setError(`OAuth error: ${oauthError}`)
|
||||
return
|
||||
setError(`OAuth error: ${oauthError}`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!handoffId || !appCode) {
|
||||
return
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const verifier = retrieveVerifier()
|
||||
const verifier = retrieveVerifier();
|
||||
if (!verifier) {
|
||||
setError('OAuth session lost. Please try again.')
|
||||
return
|
||||
setError("OAuth session lost. Please try again.");
|
||||
return;
|
||||
}
|
||||
|
||||
const token = retrieveInvitationToken() || urlToken
|
||||
const token = retrieveInvitationToken() || urlToken;
|
||||
if (!token) {
|
||||
setError('Invitation token lost. Please try again.')
|
||||
return
|
||||
setError("Invitation token lost. Please try again.");
|
||||
return;
|
||||
}
|
||||
|
||||
const { access_token } = await redeemOAuth(
|
||||
handoffId,
|
||||
appCode,
|
||||
verifier
|
||||
)
|
||||
verifier,
|
||||
);
|
||||
|
||||
const result = await acceptInvitation(token, access_token)
|
||||
const result = await acceptInvitation(token, access_token);
|
||||
|
||||
clearVerifier()
|
||||
clearInvitationToken()
|
||||
clearVerifier();
|
||||
clearInvitationToken();
|
||||
|
||||
setSuccess(true)
|
||||
setOrgSlug(result.organization_slug)
|
||||
setSuccess(true);
|
||||
setOrgSlug(result.organization_slug);
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
const appBase =
|
||||
import.meta.env.VITE_APP_BASE_URL || window.location.origin
|
||||
window.location.assign(`${appBase}`)
|
||||
}, 2000)
|
||||
return () => clearTimeout(timer)
|
||||
import.meta.env.VITE_APP_BASE_URL || window.location.origin;
|
||||
window.location.assign(`${appBase}`);
|
||||
}, 2000);
|
||||
return () => clearTimeout(timer);
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : 'Failed to complete invitation')
|
||||
clearVerifier()
|
||||
clearInvitationToken()
|
||||
setError(
|
||||
e instanceof Error ? e.message : "Failed to complete invitation",
|
||||
);
|
||||
clearVerifier();
|
||||
clearInvitationToken();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
completeInvitation()
|
||||
}, [handoffId, appCode, oauthError, urlToken])
|
||||
completeInvitation();
|
||||
}, [handoffId, appCode, oauthError, urlToken]);
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<StatusCard
|
||||
title="Could not accept invitation"
|
||||
body={error}
|
||||
isError
|
||||
/>
|
||||
)
|
||||
<StatusCard title="Could not accept invitation" body={error} isError />
|
||||
);
|
||||
}
|
||||
|
||||
if (success) {
|
||||
return (
|
||||
<StatusCard
|
||||
title="Invitation accepted!"
|
||||
body={orgSlug ? `Redirecting to ${orgSlug}...` : 'Redirecting...'}
|
||||
body={orgSlug ? `Redirecting to ${orgSlug}...` : "Redirecting..."}
|
||||
isSuccess
|
||||
/>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -99,7 +97,7 @@ export default function InvitationCompletePage() {
|
||||
title="Completing invitation..."
|
||||
body="Processing OAuth callback..."
|
||||
/>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function StatusCard({
|
||||
@@ -108,31 +106,29 @@ function StatusCard({
|
||||
isError = false,
|
||||
isSuccess = false,
|
||||
}: {
|
||||
title: string
|
||||
body: string
|
||||
isError?: boolean
|
||||
isSuccess?: boolean
|
||||
title: string;
|
||||
body: string;
|
||||
isError?: boolean;
|
||||
isSuccess?: boolean;
|
||||
}) {
|
||||
return (
|
||||
<div className="min-h-screen grid place-items-center bg-gray-50 p-4">
|
||||
<div className="max-w-md w-full bg-white shadow rounded-lg p-6">
|
||||
<h2
|
||||
className={`text-lg font-semibold ${isError
|
||||
? 'text-red-600'
|
||||
: isSuccess
|
||||
? 'text-green-600'
|
||||
: 'text-gray-900'
|
||||
}`}
|
||||
className={`text-lg font-semibold ${
|
||||
isError
|
||||
? "text-red-600"
|
||||
: isSuccess
|
||||
? "text-green-600"
|
||||
: "text-gray-900"
|
||||
}`}
|
||||
>
|
||||
{title}
|
||||
</h2>
|
||||
<p className="text-gray-600 mt-2">{body}</p>
|
||||
{isSuccess && (
|
||||
<div className="mt-4 flex items-center text-sm text-gray-500">
|
||||
<svg
|
||||
className="animate-spin h-4 w-4 mr-2"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<svg className="animate-spin h-4 w-4 mr-2" viewBox="0 0 24 24">
|
||||
<circle
|
||||
className="opacity-25"
|
||||
cx="12"
|
||||
@@ -153,5 +149,5 @@ function StatusCard({
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,62 +1,57 @@
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useParams } from 'react-router-dom'
|
||||
import { useEffect, useState } from "react";
|
||||
import { useParams } from "react-router-dom";
|
||||
import {
|
||||
getInvitation,
|
||||
initOAuth,
|
||||
type Invitation,
|
||||
type OAuthProvider,
|
||||
} from '../api'
|
||||
} from "../api";
|
||||
import {
|
||||
generateVerifier,
|
||||
generateChallenge,
|
||||
storeVerifier,
|
||||
storeInvitationToken,
|
||||
} from '../pkce'
|
||||
} from "../pkce";
|
||||
|
||||
export default function InvitationPage() {
|
||||
const { token = '' } = useParams()
|
||||
const [data, setData] = useState<Invitation | null>(null)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [loading, setLoading] = useState(false)
|
||||
const { token = "" } = useParams();
|
||||
const [data, setData] = useState<Invitation | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
getInvitation(token)
|
||||
.then(setData)
|
||||
.catch((e) => setError(e.message))
|
||||
}, [token])
|
||||
.catch((e) => setError(e.message));
|
||||
}, [token]);
|
||||
|
||||
const handleOAuthLogin = async (provider: OAuthProvider) => {
|
||||
setLoading(true)
|
||||
setLoading(true);
|
||||
try {
|
||||
const verifier = generateVerifier()
|
||||
const challenge = await generateChallenge(verifier)
|
||||
const verifier = generateVerifier();
|
||||
const challenge = await generateChallenge(verifier);
|
||||
|
||||
storeVerifier(verifier)
|
||||
storeInvitationToken(token)
|
||||
storeVerifier(verifier);
|
||||
storeInvitationToken(token);
|
||||
|
||||
const appBase =
|
||||
import.meta.env.VITE_APP_BASE_URL || window.location.origin
|
||||
const returnTo = `${appBase}/invitations/${token}/complete`
|
||||
import.meta.env.VITE_APP_BASE_URL || window.location.origin;
|
||||
const returnTo = `${appBase}/invitations/${token}/complete`;
|
||||
|
||||
const result = await initOAuth(provider, returnTo, challenge)
|
||||
window.location.assign(result.authorize_url)
|
||||
const result = await initOAuth(provider, returnTo, challenge);
|
||||
window.location.assign(result.authorize_url);
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : 'OAuth init failed')
|
||||
setLoading(false)
|
||||
setError(e instanceof Error ? e.message : "OAuth init failed");
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<ErrorCard
|
||||
title="Invalid or expired invitation"
|
||||
body={error}
|
||||
/>
|
||||
)
|
||||
return <ErrorCard title="Invalid or expired invitation" body={error} />;
|
||||
}
|
||||
|
||||
if (!data) {
|
||||
return <LoadingCard text="Loading invitation..." />
|
||||
return <LoadingCard text="Loading invitation..." />;
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -90,18 +85,18 @@ export default function InvitationPage() {
|
||||
</p>
|
||||
<OAuthButton
|
||||
label="Continue with GitHub"
|
||||
onClick={() => handleOAuthLogin('github')}
|
||||
onClick={() => handleOAuthLogin("github")}
|
||||
disabled={loading}
|
||||
/>
|
||||
<OAuthButton
|
||||
label="Continue with Google"
|
||||
onClick={() => handleOAuthLogin('google')}
|
||||
onClick={() => handleOAuthLogin("google")}
|
||||
disabled={loading}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function OAuthButton({
|
||||
@@ -109,9 +104,9 @@ function OAuthButton({
|
||||
onClick,
|
||||
disabled,
|
||||
}: {
|
||||
label: string
|
||||
onClick: () => void
|
||||
disabled?: boolean
|
||||
label: string;
|
||||
onClick: () => void;
|
||||
disabled?: boolean;
|
||||
}) {
|
||||
return (
|
||||
<button
|
||||
@@ -121,7 +116,7 @@ function OAuthButton({
|
||||
>
|
||||
{label}
|
||||
</button>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function LoadingCard({ text }: { text: string }) {
|
||||
@@ -129,7 +124,7 @@ function LoadingCard({ text }: { text: string }) {
|
||||
<div className="min-h-screen grid place-items-center bg-gray-50">
|
||||
<div className="text-gray-600">{text}</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function ErrorCard({ title, body }: { title: string; body?: string }) {
|
||||
@@ -140,5 +135,5 @@ function ErrorCard({ title, body }: { title: string; body?: string }) {
|
||||
{body && <p className="text-gray-600 mt-2">{body}</p>}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@ export default function NotFoundPage() {
|
||||
<p className="text-gray-600 mt-2">Page not found</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
782
remote-frontend/src/pages/OrganizationPage.tsx
Normal file
782
remote-frontend/src/pages/OrganizationPage.tsx
Normal file
@@ -0,0 +1,782 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { Link, useParams, useNavigate, useSearchParams } from "react-router-dom";
|
||||
import { isLoggedIn } from "../auth";
|
||||
import {
|
||||
getOrganization,
|
||||
updateOrganization,
|
||||
deleteOrganization,
|
||||
listMembers,
|
||||
removeMember,
|
||||
updateMemberRole,
|
||||
listInvitations,
|
||||
createInvitation,
|
||||
revokeInvitation,
|
||||
getProfile,
|
||||
getGitHubAppStatus,
|
||||
getGitHubAppInstallUrl,
|
||||
disconnectGitHubApp,
|
||||
type Organization,
|
||||
type OrganizationMemberWithProfile,
|
||||
type OrganizationInvitation,
|
||||
type MemberRole,
|
||||
type GitHubAppStatus,
|
||||
} from "../api";
|
||||
|
||||
export default function OrganizationPage() {
|
||||
const { orgId } = useParams<{ orgId: string }>();
|
||||
const navigate = useNavigate();
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [organization, setOrganization] = useState<Organization | null>(null);
|
||||
const [userRole, setUserRole] = useState<string | null>(null);
|
||||
const [members, setMembers] = useState<OrganizationMemberWithProfile[]>([]);
|
||||
const [invitations, setInvitations] = useState<OrganizationInvitation[]>([]);
|
||||
const [currentUserId, setCurrentUserId] = useState<string | null>(null);
|
||||
|
||||
// GitHub App state
|
||||
const [githubAppStatus, setGithubAppStatus] = useState<GitHubAppStatus | null>(null);
|
||||
const [githubAppLoading, setGithubAppLoading] = useState(false);
|
||||
const [githubAppError, setGithubAppError] = useState<string | null>(null);
|
||||
const [showGithubDisconnectConfirm, setShowGithubDisconnectConfirm] = useState(false);
|
||||
const [githubAppSuccess, setGithubAppSuccess] = useState<string | null>(null);
|
||||
|
||||
// Edit name state
|
||||
const [isEditingName, setIsEditingName] = useState(false);
|
||||
const [editedName, setEditedName] = useState("");
|
||||
const [editNameError, setEditNameError] = useState<string | null>(null);
|
||||
const [editNameLoading, setEditNameLoading] = useState(false);
|
||||
|
||||
// Delete state
|
||||
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
|
||||
const [deleteLoading, setDeleteLoading] = useState(false);
|
||||
|
||||
// Invite state
|
||||
const [showInviteForm, setShowInviteForm] = useState(false);
|
||||
const [inviteEmail, setInviteEmail] = useState("");
|
||||
const [inviteRole, setInviteRole] = useState<MemberRole>("MEMBER");
|
||||
const [inviteLoading, setInviteLoading] = useState(false);
|
||||
const [inviteError, setInviteError] = useState<string | null>(null);
|
||||
|
||||
// Action loading states
|
||||
const [actionLoading, setActionLoading] = useState<string | null>(null);
|
||||
|
||||
const isAdmin = userRole === "ADMIN";
|
||||
|
||||
useEffect(() => {
|
||||
if (!isLoggedIn()) {
|
||||
navigate("/account", { replace: true });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!orgId) return;
|
||||
loadData();
|
||||
|
||||
// Check for GitHub App callback params
|
||||
const githubAppResult = searchParams.get("github_app");
|
||||
const githubAppErrorParam = searchParams.get("github_app_error");
|
||||
|
||||
if (githubAppResult === "installed") {
|
||||
setGithubAppSuccess("GitHub App installed successfully!");
|
||||
// Clear the query param
|
||||
searchParams.delete("github_app");
|
||||
setSearchParams(searchParams, { replace: true });
|
||||
}
|
||||
|
||||
if (githubAppErrorParam) {
|
||||
setGithubAppError(githubAppErrorParam);
|
||||
searchParams.delete("github_app_error");
|
||||
setSearchParams(searchParams, { replace: true });
|
||||
}
|
||||
}, [orgId, navigate, searchParams, setSearchParams]);
|
||||
|
||||
async function loadData() {
|
||||
if (!orgId) return;
|
||||
|
||||
try {
|
||||
const [orgData, membersData, profile] = await Promise.all([
|
||||
getOrganization(orgId),
|
||||
listMembers(orgId),
|
||||
getProfile(),
|
||||
]);
|
||||
|
||||
setOrganization(orgData.organization);
|
||||
setUserRole(orgData.user_role);
|
||||
setMembers(membersData);
|
||||
setCurrentUserId(profile.user_id);
|
||||
setEditedName(orgData.organization.name);
|
||||
|
||||
// Load invitations if admin
|
||||
if (orgData.user_role === "ADMIN") {
|
||||
const invitationsData = await listInvitations(orgId);
|
||||
setInvitations(invitationsData.filter((i) => i.status === "PENDING"));
|
||||
}
|
||||
|
||||
// Load GitHub App status for non-personal orgs
|
||||
if (!orgData.organization.is_personal) {
|
||||
try {
|
||||
const ghStatus = await getGitHubAppStatus(orgId);
|
||||
setGithubAppStatus(ghStatus);
|
||||
} catch {
|
||||
// GitHub App may not be configured on the server
|
||||
setGithubAppStatus(null);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : "Failed to load organization");
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
const handleUpdateName = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
if (!orgId || !editedName.trim()) return;
|
||||
|
||||
setEditNameLoading(true);
|
||||
setEditNameError(null);
|
||||
|
||||
try {
|
||||
const updated = await updateOrganization(orgId, editedName.trim());
|
||||
setOrganization(updated);
|
||||
setIsEditingName(false);
|
||||
} catch (e) {
|
||||
setEditNameError(e instanceof Error ? e.message : "Failed to update");
|
||||
} finally {
|
||||
setEditNameLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
// Permanently delete the organization, then return to the account page.
// The loading flag is deliberately left set on success (we navigate away);
// it is only reset on failure so the confirm UI becomes usable again.
const handleDelete = async () => {
  if (!orgId) return;

  setDeleteLoading(true);

  try {
    await deleteOrganization(orgId);
    navigate("/account", { replace: true });
  } catch (e) {
    const message = e instanceof Error ? e.message : "Failed to delete";
    setError(message);
    setShowDeleteConfirm(false);
    setDeleteLoading(false);
  }
};
|
||||
|
||||
/**
 * Remove a member from the organization and drop them from the local list.
 * Per-row spinners are driven by `actionLoading` keyed on the user id.
 */
const handleRemoveMember = async (userId: string) => {
  if (!orgId) return;

  setActionLoading(userId);

  try {
    await removeMember(orgId, userId);
    // Functional updater: the previous code filtered the closed-over
    // `members` array, which can resurrect stale state if the list changed
    // (e.g. a concurrent role update) since this handler was created.
    setMembers((prev) => prev.filter((m) => m.user_id !== userId));
  } catch (e) {
    setError(e instanceof Error ? e.message : "Failed to remove member");
  } finally {
    setActionLoading(null);
  }
};
|
||||
|
||||
/**
 * Change a member's role (ADMIN/MEMBER) on the server and mirror the change
 * in local state.
 */
const handleUpdateRole = async (userId: string, newRole: MemberRole) => {
  if (!orgId) return;

  setActionLoading(userId);

  try {
    await updateMemberRole(orgId, userId, newRole);
    // Functional updater avoids losing concurrent member-list changes that a
    // stale closure over `members` would silently overwrite.
    setMembers((prev) =>
      prev.map((m) => (m.user_id === userId ? { ...m, role: newRole } : m)),
    );
  } catch (e) {
    setError(e instanceof Error ? e.message : "Failed to update role");
  } finally {
    setActionLoading(null);
  }
};
|
||||
|
||||
/**
 * Submit handler for the invite form: create an invitation for the entered
 * email/role, append it to the pending list, and reset/close the form.
 */
const handleInvite = async (e: React.FormEvent) => {
  e.preventDefault();
  if (!orgId || !inviteEmail.trim()) return;

  setInviteLoading(true);
  setInviteError(null);

  try {
    const invitation = await createInvitation(
      orgId,
      inviteEmail.trim(),
      inviteRole,
    );
    // Functional updater: spreading the closed-over `invitations` could drop
    // entries added concurrently (e.g. after a revoke + re-invite).
    setInvitations((prev) => [...prev, invitation]);
    setInviteEmail("");
    setShowInviteForm(false);
  } catch (e) {
    setInviteError(e instanceof Error ? e.message : "Failed to send invite");
  } finally {
    setInviteLoading(false);
  }
};
|
||||
|
||||
/**
 * Revoke a pending invitation on the server and drop it from the local list.
 */
const handleRevokeInvitation = async (invitationId: string) => {
  if (!orgId) return;

  setActionLoading(invitationId);

  try {
    await revokeInvitation(orgId, invitationId);
    // Functional updater so concurrent invitation-list changes captured in a
    // stale closure over `invitations` are not clobbered.
    setInvitations((prev) => prev.filter((i) => i.id !== invitationId));
  } catch (e) {
    setError(e instanceof Error ? e.message : "Failed to revoke invitation");
  } finally {
    setActionLoading(null);
  }
};
|
||||
|
||||
// Begin GitHub App installation: fetch the install URL, then hand control to
// GitHub's installation flow by navigating away.
const handleInstallGitHubApp = async () => {
  if (!orgId) return;

  setGithubAppLoading(true);
  setGithubAppError(null);

  try {
    const response = await getGitHubAppInstallUrl(orgId);
    // No need to reset the loading flag on success — the page is being left.
    window.location.href = response.install_url;
  } catch (e) {
    const message =
      e instanceof Error ? e.message : "Failed to start installation";
    setGithubAppError(message);
    setGithubAppLoading(false);
  }
};
|
||||
|
||||
/**
 * Disconnect the GitHub App installation for this organization and reset
 * local status to the "not installed" shape.
 */
const handleDisconnectGitHubApp = async () => {
  if (!orgId) return;

  setGithubAppLoading(true);
  setGithubAppError(null);

  try {
    await disconnectGitHubApp(orgId);
    setGithubAppStatus({
      installed: false,
      installation: null,
      repositories: [],
    });
    setShowGithubDisconnectConfirm(false);
    setGithubAppSuccess("GitHub App disconnected");
  } catch (e) {
    const message = e instanceof Error ? e.message : "Failed to disconnect";
    setGithubAppError(message);
  } finally {
    setGithubAppLoading(false);
  }
};
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div className="min-h-screen grid place-items-center bg-gray-50">
|
||||
<div className="text-gray-600">Loading...</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (error && !organization) {
|
||||
return (
|
||||
<div className="min-h-screen grid place-items-center bg-gray-50 p-4">
|
||||
<div className="max-w-md w-full bg-white shadow rounded-lg p-6">
|
||||
<h2 className="text-lg font-semibold text-red-600">Error</h2>
|
||||
<p className="text-gray-600 mt-2">{error}</p>
|
||||
<Link
|
||||
to="/account"
|
||||
className="mt-4 inline-block text-sm text-gray-600 hover:text-gray-900"
|
||||
>
|
||||
← Back to account
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-gray-50 p-4">
|
||||
<div className="max-w-2xl mx-auto space-y-6">
|
||||
{/* Back link */}
|
||||
<Link
|
||||
to="/account"
|
||||
className="inline-flex items-center text-sm text-gray-600 hover:text-gray-900"
|
||||
>
|
||||
<svg
|
||||
className="w-4 h-4 mr-1"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth={2}
|
||||
d="M15 19l-7-7 7-7"
|
||||
/>
|
||||
</svg>
|
||||
Back to account
|
||||
</Link>
|
||||
|
||||
{/* Error banner */}
|
||||
{error && (
|
||||
<div className="bg-red-50 border border-red-200 rounded-lg p-3">
|
||||
<p className="text-sm text-red-600">{error}</p>
|
||||
<button
|
||||
onClick={() => setError(null)}
|
||||
className="text-xs text-red-500 hover:text-red-700 mt-1"
|
||||
>
|
||||
Dismiss
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Organization Details Card */}
|
||||
<div className="bg-white shadow rounded-lg p-6">
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="flex-1">
|
||||
{isEditingName ? (
|
||||
<form onSubmit={handleUpdateName} className="space-y-2">
|
||||
<input
|
||||
type="text"
|
||||
value={editedName}
|
||||
onChange={(e) => setEditedName(e.target.value)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent text-lg font-bold"
|
||||
autoFocus
|
||||
/>
|
||||
{editNameError && (
|
||||
<p className="text-sm text-red-600">{editNameError}</p>
|
||||
)}
|
||||
<div className="flex gap-2">
|
||||
<button
|
||||
type="submit"
|
||||
disabled={editNameLoading}
|
||||
className="px-3 py-1.5 bg-gray-900 text-white text-sm rounded-lg hover:bg-gray-800 disabled:opacity-50"
|
||||
>
|
||||
{editNameLoading ? "Saving..." : "Save"}
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
setIsEditingName(false);
|
||||
setEditedName(organization?.name || "");
|
||||
setEditNameError(null);
|
||||
}}
|
||||
className="px-3 py-1.5 text-sm text-gray-600 hover:text-gray-900"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
) : (
|
||||
<div className="flex items-center gap-2">
|
||||
<h1 className="text-xl font-bold text-gray-900">
|
||||
{organization?.name}
|
||||
</h1>
|
||||
{isAdmin && !organization?.is_personal && (
|
||||
<button
|
||||
onClick={() => setIsEditingName(true)}
|
||||
className="text-gray-400 hover:text-gray-600"
|
||||
title="Edit name"
|
||||
>
|
||||
<svg
|
||||
className="w-4 h-4"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth={2}
|
||||
d="M15.232 5.232l3.536 3.536m-2.036-5.036a2.5 2.5 0 113.536 3.536L6.5 21.036H3v-3.572L16.732 3.732z"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<p className="text-gray-600 mt-1">@{organization?.slug}</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{organization?.is_personal && (
|
||||
<span className="text-xs px-2 py-0.5 bg-blue-100 text-blue-700 rounded">
|
||||
Personal
|
||||
</span>
|
||||
)}
|
||||
<span
|
||||
className={`text-xs px-2 py-0.5 rounded ${
|
||||
userRole === "ADMIN"
|
||||
? "bg-green-100 text-green-700"
|
||||
: "bg-gray-100 text-gray-700"
|
||||
}`}
|
||||
>
|
||||
{userRole}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Delete button (admin only, non-personal) */}
|
||||
{isAdmin && !organization?.is_personal && (
|
||||
<div className="mt-6 pt-4 border-t border-gray-200">
|
||||
{showDeleteConfirm ? (
|
||||
<div className="bg-red-50 rounded-lg p-4">
|
||||
<p className="text-sm text-red-700 mb-3">
|
||||
Are you sure you want to delete this organization? This
|
||||
action cannot be undone.
|
||||
</p>
|
||||
<div className="flex gap-2">
|
||||
<button
|
||||
onClick={handleDelete}
|
||||
disabled={deleteLoading}
|
||||
className="px-3 py-1.5 bg-red-600 text-white text-sm rounded-lg hover:bg-red-700 disabled:opacity-50"
|
||||
>
|
||||
{deleteLoading ? "Deleting..." : "Yes, delete"}
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setShowDeleteConfirm(false)}
|
||||
className="px-3 py-1.5 text-sm text-gray-600 hover:text-gray-900"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<button
|
||||
onClick={() => setShowDeleteConfirm(true)}
|
||||
className="text-sm text-red-600 hover:text-red-700"
|
||||
>
|
||||
Delete organization
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Members Card */}
|
||||
<div className="bg-white shadow rounded-lg p-6">
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<h2 className="text-lg font-semibold text-gray-900">Members</h2>
|
||||
{isAdmin && !organization?.is_personal && (
|
||||
<button
|
||||
onClick={() => setShowInviteForm(!showInviteForm)}
|
||||
className="text-sm px-3 py-1.5 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors"
|
||||
>
|
||||
{showInviteForm ? "Cancel" : "Invite Member"}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Invite form */}
|
||||
{showInviteForm && (
|
||||
<form
|
||||
onSubmit={handleInvite}
|
||||
className="mb-4 p-4 bg-gray-50 rounded-lg space-y-3"
|
||||
>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Email address
|
||||
</label>
|
||||
<input
|
||||
type="email"
|
||||
value={inviteEmail}
|
||||
onChange={(e) => setInviteEmail(e.target.value)}
|
||||
placeholder="user@example.com"
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Role
|
||||
</label>
|
||||
<select
|
||||
value={inviteRole}
|
||||
onChange={(e) => setInviteRole(e.target.value as MemberRole)}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-gray-900 focus:border-transparent"
|
||||
>
|
||||
<option value="MEMBER">Member</option>
|
||||
<option value="ADMIN">Admin</option>
|
||||
</select>
|
||||
</div>
|
||||
{inviteError && (
|
||||
<p className="text-sm text-red-600">{inviteError}</p>
|
||||
)}
|
||||
<button
|
||||
type="submit"
|
||||
disabled={inviteLoading}
|
||||
className="w-full py-2 px-4 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors font-medium disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
{inviteLoading ? "Sending..." : "Send Invitation"}
|
||||
</button>
|
||||
</form>
|
||||
)}
|
||||
|
||||
{/* Members list */}
|
||||
<div className="divide-y divide-gray-200">
|
||||
{members.map((member) => (
|
||||
<div
|
||||
key={member.user_id}
|
||||
className="flex items-center justify-between py-3"
|
||||
>
|
||||
<div className="flex items-center gap-3">
|
||||
{member.avatar_url ? (
|
||||
<img
|
||||
src={member.avatar_url}
|
||||
alt=""
|
||||
className="w-8 h-8 rounded-full"
|
||||
/>
|
||||
) : (
|
||||
<div className="w-8 h-8 rounded-full bg-gray-200 flex items-center justify-center">
|
||||
<span className="text-xs text-gray-500">
|
||||
{(member.first_name?.[0] || member.email?.[0] || "?").toUpperCase()}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
<p className="font-medium text-gray-900">
|
||||
{member.first_name || member.username || member.email}
|
||||
{member.user_id === currentUserId && (
|
||||
<span className="text-gray-500 font-normal">
|
||||
{" "}
|
||||
(you)
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
<p className="text-sm text-gray-500">{member.email}</p>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{isAdmin && !organization?.is_personal ? (
|
||||
<>
|
||||
<select
|
||||
value={member.role}
|
||||
onChange={(e) =>
|
||||
handleUpdateRole(
|
||||
member.user_id,
|
||||
e.target.value as MemberRole,
|
||||
)
|
||||
}
|
||||
disabled={
|
||||
actionLoading === member.user_id ||
|
||||
member.user_id === currentUserId
|
||||
}
|
||||
className="text-xs px-2 py-1 border border-gray-300 rounded focus:outline-none focus:ring-1 focus:ring-gray-900 disabled:opacity-50"
|
||||
>
|
||||
<option value="MEMBER">Member</option>
|
||||
<option value="ADMIN">Admin</option>
|
||||
</select>
|
||||
{member.user_id !== currentUserId && (
|
||||
<button
|
||||
onClick={() => handleRemoveMember(member.user_id)}
|
||||
disabled={actionLoading === member.user_id}
|
||||
className="text-red-600 hover:text-red-700 text-sm disabled:opacity-50"
|
||||
>
|
||||
Remove
|
||||
</button>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<span
|
||||
className={`text-xs px-2 py-0.5 rounded ${
|
||||
member.role === "ADMIN"
|
||||
? "bg-green-100 text-green-700"
|
||||
: "bg-gray-100 text-gray-700"
|
||||
}`}
|
||||
>
|
||||
{member.role}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Pending Invitations Card (admin only) */}
|
||||
{isAdmin && invitations.length > 0 && (
|
||||
<div className="bg-white shadow rounded-lg p-6">
|
||||
<h2 className="text-lg font-semibold text-gray-900 mb-4">
|
||||
Pending Invitations
|
||||
</h2>
|
||||
<div className="divide-y divide-gray-200">
|
||||
{invitations.map((invitation) => (
|
||||
<div
|
||||
key={invitation.id}
|
||||
className="flex items-center justify-between py-3"
|
||||
>
|
||||
<div>
|
||||
<p className="font-medium text-gray-900">
|
||||
{invitation.email}
|
||||
</p>
|
||||
<p className="text-sm text-gray-500">
|
||||
Role: {invitation.role} · Expires{" "}
|
||||
{new Date(invitation.expires_at).toLocaleDateString()}
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => handleRevokeInvitation(invitation.id)}
|
||||
disabled={actionLoading === invitation.id}
|
||||
className="text-red-600 hover:text-red-700 text-sm disabled:opacity-50"
|
||||
>
|
||||
{actionLoading === invitation.id ? "..." : "Revoke"}
|
||||
</button>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* GitHub Integration Card (admin only, non-personal orgs) */}
|
||||
{isAdmin && !organization?.is_personal && githubAppStatus !== null && (
|
||||
<div className="bg-white shadow rounded-lg p-6">
|
||||
<h2 className="text-lg font-semibold text-gray-900 mb-2">
|
||||
GitHub Integration
|
||||
</h2>
|
||||
<p className="text-sm text-gray-600 mb-4">
|
||||
Connect a GitHub App to automatically track pull requests from your repositories.
|
||||
</p>
|
||||
|
||||
{/* Success message */}
|
||||
{githubAppSuccess && (
|
||||
<div className="mb-4 p-3 bg-green-50 border border-green-200 rounded-lg">
|
||||
<p className="text-sm text-green-700">{githubAppSuccess}</p>
|
||||
<button
|
||||
onClick={() => setGithubAppSuccess(null)}
|
||||
className="text-xs text-green-600 hover:text-green-800 mt-1"
|
||||
>
|
||||
Dismiss
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Error message */}
|
||||
{githubAppError && (
|
||||
<div className="mb-4 p-3 bg-red-50 border border-red-200 rounded-lg">
|
||||
<p className="text-sm text-red-700">{githubAppError}</p>
|
||||
<button
|
||||
onClick={() => setGithubAppError(null)}
|
||||
className="text-xs text-red-600 hover:text-red-800 mt-1"
|
||||
>
|
||||
Dismiss
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{githubAppStatus.installed && githubAppStatus.installation ? (
|
||||
// Installed state
|
||||
<div>
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg
|
||||
className="w-5 h-5 text-gray-800"
|
||||
fill="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path d="M12 0c-6.626 0-12 5.373-12 12 0 5.302 3.438 9.8 8.207 11.387.599.111.793-.261.793-.577v-2.234c-3.338.726-4.033-1.416-4.033-1.416-.546-1.387-1.333-1.756-1.333-1.756-1.089-.745.083-.729.083-.729 1.205.084 1.839 1.237 1.839 1.237 1.07 1.834 2.807 1.304 3.492.997.107-.775.418-1.305.762-1.604-2.665-.305-5.467-1.334-5.467-5.931 0-1.311.469-2.381 1.236-3.221-.124-.303-.535-1.524.117-3.176 0 0 1.008-.322 3.301 1.23.957-.266 1.983-.399 3.003-.404 1.02.005 2.047.138 3.006.404 2.291-1.552 3.297-1.23 3.297-1.23.653 1.653.242 2.874.118 3.176.77.84 1.235 1.911 1.235 3.221 0 4.609-2.807 5.624-5.479 5.921.43.372.823 1.102.823 2.222v3.293c0 .319.192.694.801.576 4.765-1.589 8.199-6.086 8.199-11.386 0-6.627-5.373-12-12-12z" />
|
||||
</svg>
|
||||
<span className="font-medium text-gray-900">
|
||||
@{githubAppStatus.installation.github_account_login}
|
||||
</span>
|
||||
{githubAppStatus.installation.suspended_at && (
|
||||
<span className="px-2 py-0.5 text-xs bg-yellow-100 text-yellow-700 rounded">
|
||||
Suspended
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<span className="px-2 py-0.5 text-xs bg-green-100 text-green-700 rounded">
|
||||
Connected
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="text-sm text-gray-600 mb-4">
|
||||
{githubAppStatus.installation.repository_selection === "all" ? (
|
||||
<p>All repositories are being monitored.</p>
|
||||
) : (
|
||||
<p>
|
||||
{githubAppStatus.repositories.length} selected{" "}
|
||||
{githubAppStatus.repositories.length === 1
|
||||
? "repository"
|
||||
: "repositories"}{" "}
|
||||
being monitored.
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Repository list (if selected) */}
|
||||
{githubAppStatus.installation.repository_selection === "selected" &&
|
||||
githubAppStatus.repositories.length > 0 && (
|
||||
<div className="mb-4 p-3 bg-gray-50 rounded-lg">
|
||||
<p className="text-xs font-medium text-gray-500 mb-2">
|
||||
Monitored repositories:
|
||||
</p>
|
||||
<div className="space-y-1">
|
||||
{githubAppStatus.repositories.slice(0, 5).map((repo) => (
|
||||
<p key={repo.id} className="text-sm text-gray-700">
|
||||
{repo.repo_full_name}
|
||||
</p>
|
||||
))}
|
||||
{githubAppStatus.repositories.length > 5 && (
|
||||
<p className="text-xs text-gray-500">
|
||||
+{githubAppStatus.repositories.length - 5} more
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Disconnect section */}
|
||||
{showGithubDisconnectConfirm ? (
|
||||
<div className="bg-red-50 rounded-lg p-4">
|
||||
<p className="text-sm text-red-700 mb-3">
|
||||
Are you sure you want to disconnect the GitHub App? You will need
|
||||
to reinstall it from GitHub to reconnect.
|
||||
</p>
|
||||
<div className="flex gap-2">
|
||||
<button
|
||||
onClick={handleDisconnectGitHubApp}
|
||||
disabled={githubAppLoading}
|
||||
className="px-3 py-1.5 bg-red-600 text-white text-sm rounded-lg hover:bg-red-700 disabled:opacity-50"
|
||||
>
|
||||
{githubAppLoading ? "Disconnecting..." : "Yes, disconnect"}
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setShowGithubDisconnectConfirm(false)}
|
||||
className="px-3 py-1.5 text-sm text-gray-600 hover:text-gray-900"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<button
|
||||
onClick={() => setShowGithubDisconnectConfirm(true)}
|
||||
className="text-sm text-red-600 hover:text-red-700"
|
||||
>
|
||||
Disconnect GitHub App
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
// Not installed state
|
||||
<div>
|
||||
<button
|
||||
onClick={handleInstallGitHubApp}
|
||||
disabled={githubAppLoading}
|
||||
className="inline-flex items-center gap-2 px-4 py-2 bg-gray-900 text-white rounded-lg hover:bg-gray-800 transition-colors disabled:opacity-50"
|
||||
>
|
||||
<svg
|
||||
className="w-5 h-5"
|
||||
fill="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path d="M12 0c-6.626 0-12 5.373-12 12 0 5.302 3.438 9.8 8.207 11.387.599.111.793-.261.793-.577v-2.234c-3.338.726-4.033-1.416-4.033-1.416-.546-1.387-1.333-1.756-1.333-1.756-1.089-.745.083-.729.083-.729 1.205.084 1.839 1.237 1.839 1.237 1.07 1.834 2.807 1.304 3.492.997.107-.775.418-1.305.762-1.604-2.665-.305-5.467-1.334-5.467-5.931 0-1.311.469-2.381 1.236-3.221-.124-.303-.535-1.524.117-3.176 0 0 1.008-.322 3.301 1.23.957-.266 1.983-.399 3.003-.404 1.02.005 2.047.138 3.006.404 2.291-1.552 3.297-1.23 3.297-1.23.653 1.653.242 2.874.118 3.176.77.84 1.235 1.911 1.235 3.221 0 4.609-2.807 5.624-5.479 5.921.43.372.823 1.102.823 2.222v3.293c0 .319.192.694.801.576 4.765-1.589 8.199-6.086 8.199-11.386 0-6.627-5.373-12-12-12z" />
|
||||
</svg>
|
||||
{githubAppLoading ? "Loading..." : "Connect GitHub App"}
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
614
remote-frontend/src/pages/ReviewPage.tsx
Normal file
614
remote-frontend/src/pages/ReviewPage.tsx
Normal file
@@ -0,0 +1,614 @@
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { useParams } from "react-router-dom";
|
||||
import { DiffView, DiffModeEnum } from "@git-diff-view/react";
|
||||
import "@git-diff-view/react/styles/diff-view.css";
|
||||
import "../styles/diff-overrides.css";
|
||||
import {
|
||||
getReview,
|
||||
getFileContent,
|
||||
getDiff,
|
||||
getReviewMetadata,
|
||||
type ReviewMetadata,
|
||||
} from "../api";
|
||||
import type { ReviewResult, ReviewComment } from "../types/review";
|
||||
import { MarkdownRenderer } from "../components/MarkdownRenderer";
|
||||
import {
|
||||
parseUnifiedDiff,
|
||||
getFileDiff,
|
||||
synthesizeFragmentDiff,
|
||||
type ParsedFileDiff,
|
||||
} from "../lib/diff-parser";
|
||||
import { getHighlightLanguageFromPath } from "../lib/extToLanguage";
|
||||
import { CodeFragmentCard } from "../components/CodeFragmentCard";
|
||||
import { cn } from "../lib/utils";
|
||||
|
||||
const DIFF_VIEW_MODE_KEY = "diff-view-mode";
|
||||
|
||||
function diffHasChanges(diffString: string): boolean {
|
||||
return diffString.split("\n").some((line) => {
|
||||
if (!line) return false;
|
||||
if (
|
||||
line.startsWith("--- ") ||
|
||||
line.startsWith("+++ ") ||
|
||||
line.startsWith("@@")
|
||||
)
|
||||
return false;
|
||||
return line[0] === "+" || line[0] === "-";
|
||||
});
|
||||
}
|
||||
|
||||
type FileCache = Map<string, string>;
|
||||
|
||||
/**
 * Review story page: loads a review (summary + ordered comments), the raw
 * unified diff, and PR metadata for the review id in the route, then renders
 * a full-height header followed by one scroll-driven "story row" per comment.
 */
export default function ReviewPage() {
  const { id } = useParams<{ id: string }>();
  const [review, setReview] = useState<ReviewResult | null>(null);
  const [metadata, setMetadata] = useState<ReviewMetadata | null>(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);
  // file path -> full file content, filled lazily by fetchFile.
  const [fileCache, setFileCache] = useState<FileCache>(new Map());
  const [loadingFiles, setLoadingFiles] = useState<Set<string>>(new Set());
  const [scrollProgress, setScrollProgress] = useState(0);
  const [diffText, setDiffText] = useState<string>("");
  // Unified/split preference, persisted to localStorage under
  // DIFF_VIEW_MODE_KEY; anything other than "split" falls back to Unified.
  const [diffViewMode, setDiffViewMode] = useState<DiffModeEnum>(() => {
    const saved = localStorage.getItem(DIFF_VIEW_MODE_KEY);
    return saved === "split" ? DiffModeEnum.Split : DiffModeEnum.Unified;
  });
  // In-flight fetch guard (ref, not state, so duplicate requests are
  // suppressed without triggering re-renders).
  const fetchingFiles = useRef<Set<string>>(new Set());

  const parsedDiffs = useMemo(() => parseUnifiedDiff(diffText), [diffText]);

  // Persist the view-mode choice alongside updating state.
  const handleViewModeChange = useCallback((mode: DiffModeEnum) => {
    setDiffViewMode(mode);
    localStorage.setItem(
      DIFF_VIEW_MODE_KEY,
      mode === DiffModeEnum.Split ? "split" : "unified",
    );
  }, []);

  useEffect(() => {
    if (!id) return;
    // Skip refetch if we already have data for this review (e.g., during HMR)
    // NOTE(review): this guard also suppresses refetching when `id` changes
    // while a previous review is still loaded, so navigating between reviews
    // would show stale data — confirm whether in-app review-to-review
    // navigation is possible.
    if (review) return;

    setLoading(true);
    setError(null);

    Promise.all([getReview(id), getDiff(id), getReviewMetadata(id)])
      .then(([reviewData, diffData, metadataData]) => {
        setReview(reviewData);
        setDiffText(diffData);
        setMetadata(metadataData);
        setLoading(false);
      })
      .catch((err) => {
        setError(err.message || "Failed to load review");
        setLoading(false);
      });
  }, [id, review]);

  // Invert the review's hash -> path map so fragments (keyed by path) can
  // resolve the content hash needed for getFileContent.
  const pathToHash = useMemo(() => {
    if (!review) return new Map<string, string>();
    const map = new Map<string, string>();
    for (const [hash, path] of Object.entries(review.fileHashMap)) {
      map.set(path, hash);
    }
    return map;
  }, [review]);

  // Fetch one file's content into fileCache. Deduplicates concurrent
  // requests via fetchingFiles; fetch failures are logged and the file is
  // simply left out of the cache.
  const fetchFile = useCallback(
    async (filePath: string) => {
      if (!id || !review) return;

      const hash = pathToHash.get(filePath);
      if (!hash) return;
      if (fetchingFiles.current.has(filePath)) return;

      fetchingFiles.current.add(filePath);
      setLoadingFiles((prev) => new Set(prev).add(filePath));

      try {
        const content = await getFileContent(id, hash);
        setFileCache((prev) => new Map(prev).set(filePath, content));
      } catch (err) {
        console.error(`Failed to fetch file ${filePath}:`, err);
      } finally {
        fetchingFiles.current.delete(filePath);
        setLoadingFiles((prev) => {
          const next = new Set(prev);
          next.delete(filePath);
          return next;
        });
      }
    },
    [id, review, pathToHash],
  );

  // Prefetch every file referenced by any comment fragment once the review
  // is loaded.
  useEffect(() => {
    if (!review) return;

    const allFiles = new Set<string>();
    for (const comment of review.comments) {
      for (const fragment of comment.fragments) {
        allFiles.add(fragment.file);
      }
    }

    for (const filePath of allFiles) {
      fetchFile(filePath);
    }
  }, [review, fetchFile]);

  // Track scroll position as a 0..1 fraction for the top progress bar.
  useEffect(() => {
    const handleScroll = () => {
      const scrollTop = window.scrollY;
      const docHeight =
        document.documentElement.scrollHeight - window.innerHeight;
      const progress = docHeight > 0 ? Math.min(1, scrollTop / docHeight) : 0;
      setScrollProgress(progress);
    };

    window.addEventListener("scroll", handleScroll, { passive: true });
    handleScroll();
    return () => window.removeEventListener("scroll", handleScroll);
  }, []);

  // Parse PR metadata from the GitHub URL
  const prMetadata = useMemo(() => {
    if (!metadata) {
      return { org: "", repo: "", number: 0, title: "" };
    }
    // Parse gh_pr_url: https://github.com/owner/repo/pull/123
    const match = metadata.gh_pr_url.match(
      /github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/,
    );
    if (match) {
      return {
        org: match[1],
        repo: match[2],
        number: parseInt(match[3], 10),
        title: metadata.pr_title,
      };
    }
    // Fall back to the raw title with empty org/repo when the URL does not
    // match the expected GitHub PR shape.
    return { org: "", repo: "", number: 0, title: metadata.pr_title };
  }, [metadata]);

  // Reflect the loaded PR in the browser tab title.
  useEffect(() => {
    if (review && prMetadata.title) {
      document.title = `Review: ${prMetadata.title} · ${prMetadata.org}/${prMetadata.repo}#${prMetadata.number}`;
    }
  }, [review, prMetadata]);

  // Full-screen spinner while the initial Promise.all is in flight.
  if (loading) {
    return (
      <div className="min-h-screen bg-background flex items-center justify-center">
        <div className="text-center">
          <div className="animate-spin rounded-full h-8 w-8 border-b-2 border-foreground mx-auto mb-4"></div>
          <p className="text-muted-foreground text-sm">Loading review...</p>
        </div>
      </div>
    );
  }

  // Full-screen error state (fetch failure or missing review).
  if (error || !review) {
    return (
      <div className="min-h-screen bg-background flex items-center justify-center p-4">
        <div className="text-center max-w-md">
          <div className="w-12 h-12 rounded-full bg-destructive/10 flex items-center justify-center mx-auto mb-4">
            <svg
              className="w-6 h-6 text-destructive"
              fill="none"
              stroke="currentColor"
              viewBox="0 0 24 24"
            >
              <path
                strokeLinecap="round"
                strokeLinejoin="round"
                strokeWidth={2}
                d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
              />
            </svg>
          </div>
          <h1 className="text-lg font-semibold text-foreground mb-2">
            {error || "Review not found"}
          </h1>
          <p className="text-muted-foreground text-sm">
            The review you're looking for doesn't exist or has been removed.
          </p>
        </div>
      </div>
    );
  }

  // Prefer the server-provided PR URL; reconstruct from parsed parts only as
  // a fallback.
  const prUrl =
    metadata?.gh_pr_url ||
    `https://github.com/${prMetadata.org}/${prMetadata.repo}/pull/${prMetadata.number}`;
  const hasDiff = parsedDiffs.length > 0;

  return (
    <div className="min-h-screen bg-background flex flex-col">
      {/* Scroll Progress Bar */}
      <div className="fixed top-0 left-0 right-0 h-1 bg-muted z-50">
        <div
          className="h-full bg-primary transition-[width] duration-75"
          style={{ width: `${scrollProgress * 100}%` }}
        />
      </div>

      {/* Header - Two Column Layout - Full Height */}
      <div className="min-h-screen border-b px-4 py-5 mt-1 flex flex-col justify-center items-center">
        <div className="w-full max-w-2xl p-8 space-y-40">
          <div className="space-y-4">
            <div className="flex gap-2 items-center text-secondary-foreground">
              {/* Repo icon */}
              <svg
                className="h-4 w-4 shrink-0"
                fill="currentColor"
                viewBox="0 0 16 16"
              >
                <path d="M2 2.5A2.5 2.5 0 0 1 4.5 0h8.75a.75.75 0 0 1 .75.75v12.5a.75.75 0 0 1-.75.75h-2.5a.75.75 0 0 1 0-1.5h1.75v-2h-8a1 1 0 0 0-.714 1.7.75.75 0 1 1-1.072 1.05A2.495 2.495 0 0 1 2 11.5Zm10.5-1h-8a1 1 0 0 0-1 1v6.708A2.486 2.486 0 0 1 4.5 9h8ZM5 12.25a.25.25 0 0 1 .25-.25h3.5a.25.25 0 0 1 .25.25v3.25a.25.25 0 0 1-.4.2l-1.45-1.087a.249.249 0 0 0-.3 0L5.4 15.7a.25.25 0 0 1-.4-.2Z" />
              </svg>
              <h2>
                <a
                  href={prUrl}
                  target="_blank"
                  rel="noopener noreferrer"
                  className="hover:underline"
                >
                  {prMetadata.org}/{prMetadata.repo}
                </a>
              </h2>
            </div>
            <div className="border-b pb-4">
              <h1 className="text-2xl">
                {prMetadata.title} (
                <a
                  href={prUrl}
                  target="_blank"
                  rel="noopener noreferrer"
                  className="hover:underline"
                >
                  #{prMetadata.number}
                </a>
                )
              </h1>
            </div>
            <div>
              {/* AI-generated overall summary of the PR */}
              <MarkdownRenderer
                content={review.summary}
                className="text-base text-secondary-foreground"
              />
            </div>
          </div>
          <div>
            <div className="bg-muted p-4 rounded-sm space-y-2 border">
              <a href="https://review.fast" target="_blank">
                <img
                  src="/review_fast_logo_dark.svg"
                  alt="Logo"
                  className="w-32"
                />
              </a>
              <p className="text-base text-secondary-foreground">
                To make this PR easier to understand and review, an AI agent has
                written a <i>review story</i>. The changes are presented in a
                clear, logical order, with concise, AI-generated comments that
                explain context and highlight what matters.{" "}
                <a
                  href="https://review.fast"
                  className="text-primary-foreground"
                  target="_blank"
                >
                  Learn more.
                </a>
              </p>
              <p className="text-base">Please scroll to begin</p>
            </div>
          </div>
        </div>
      </div>

      {/* Comments List - Two Column Grid Layout */}
      {review.comments.map((comment, idx) => (
        <CommentStoryRow
          key={idx}
          index={idx + 1}
          totalComments={review.comments.length}
          comment={comment}
          fileCache={fileCache}
          loadingFiles={loadingFiles}
          parsedDiffs={parsedDiffs}
          hasDiff={hasDiff}
          diffViewMode={diffViewMode}
        />
      ))}

      {/* Footer - Promotional */}
      <div className="border-t px-4 py-6 bg-muted/30 pb-16">
        <div className="text-center">
          <p className="text-sm text-muted-foreground mb-2">
            Generate AI-powered code reviews for your pull requests
          </p>
          <code className="inline-block bg-secondary px-3 py-2 rounded-md text-sm font-mono text-foreground">
            npx vibe-kanban review https://github.com/owner/repo/pull/123
          </code>
        </div>
      </div>

      {/* Fixed Footer Toolbar */}
      <div className="fixed bottom-0 left-0 right-0 bg-background/95 backdrop-blur border-t z-50 px-4 py-2 opacity-50 hover:opacity-100 transition-opacity">
        <div className="flex justify-between items-center">
          {/* Left: Logo */}
          <a
            href="https://review.fast"
            target="_blank"
            rel="noopener noreferrer"
          >
            <img
              src="/review_fast_logo_dark.svg"
              alt="review.fast"
              className="h-3"
            />
          </a>

          {/* Right: View Toggle */}
          <div className="flex items-center gap-1">
            <span className="text-sm text-muted-foreground mr-2">View:</span>
            <button
              onClick={() => handleViewModeChange(DiffModeEnum.Unified)}
              className={cn(
                "px-3 py-1 text-sm rounded-l border",
                diffViewMode === DiffModeEnum.Unified
                  ? "bg-primary text-primary-foreground"
                  : "bg-muted text-muted-foreground hover:bg-muted/80",
              )}
            >
              Unified
            </button>
            <button
              onClick={() => handleViewModeChange(DiffModeEnum.Split)}
              className={cn(
                "px-3 py-1 text-sm rounded-r border-t border-r border-b",
                diffViewMode === DiffModeEnum.Split
                  ? "bg-primary text-primary-foreground"
                  : "bg-muted text-muted-foreground hover:bg-muted/80",
              )}
            >
              Split
            </button>
          </div>
        </div>
      </div>
    </div>
  );
}
|
||||
|
||||
interface CommentStoryRowProps {
|
||||
index: number;
|
||||
totalComments: number;
|
||||
comment: ReviewComment;
|
||||
fileCache: FileCache;
|
||||
loadingFiles: Set<string>;
|
||||
parsedDiffs: ParsedFileDiff[];
|
||||
hasDiff: boolean;
|
||||
diffViewMode: DiffModeEnum;
|
||||
}
|
||||
|
||||
function CommentStoryRow({
|
||||
index,
|
||||
totalComments,
|
||||
comment,
|
||||
fileCache,
|
||||
loadingFiles,
|
||||
parsedDiffs,
|
||||
hasDiff,
|
||||
diffViewMode,
|
||||
}: CommentStoryRowProps) {
|
||||
const hasComment = comment.comment && comment.comment.trim().length > 0;
|
||||
|
||||
return (
|
||||
<div className="min-h-screen flex flex-row justify-center px-8 2xl:px-[10vw] space-x-8 2xl:space-x-[5vw]">
|
||||
<div className="flex-1 flex w-1/2 2xl:w-1/3">
|
||||
<div className="h-screen sticky top-0 flex items-center">
|
||||
<div className="flex space-x-4">
|
||||
<span className="inline-flex items-center justify-center h-12 w-12 rounded-full bg-muted shrink-0">
|
||||
<div className="inline-flex items-baseline text-primary-foreground text-xl font-bold">
|
||||
{index}
|
||||
<span className="text-sm text-muted-foreground font-medium">
|
||||
/{totalComments}
|
||||
</span>
|
||||
</div>
|
||||
</span>
|
||||
{hasComment ? (
|
||||
<MarkdownRenderer
|
||||
content={comment.comment}
|
||||
className="text-lg min-w-0 text-secondary-foreground"
|
||||
/>
|
||||
) : (
|
||||
<span className="text-sm text-muted-foreground italic">
|
||||
(No comment text)
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Right Column - Code Fragments */}
|
||||
<div className="pt-[100vh] pb-[50vh] w-1/2 2xl:w-2/3 space-y-[50vh]">
|
||||
{comment.fragments.length > 0 ? (
|
||||
comment.fragments.map((fragment, fIdx) => (
|
||||
<DiffFragmentCard
|
||||
key={`${fragment.file}:${fragment.start_line}-${fragment.end_line}:${fIdx}`}
|
||||
file={fragment.file}
|
||||
startLine={fragment.start_line}
|
||||
endLine={fragment.end_line}
|
||||
message={fragment.message}
|
||||
parsedDiffs={parsedDiffs}
|
||||
fileContent={fileCache.get(fragment.file)}
|
||||
isLoading={loadingFiles.has(fragment.file)}
|
||||
hasDiff={hasDiff}
|
||||
diffViewMode={diffViewMode}
|
||||
/>
|
||||
))
|
||||
) : (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
No code fragments for this comment.
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
interface DiffFragmentCardProps {
|
||||
file: string;
|
||||
startLine: number;
|
||||
endLine: number;
|
||||
message: string;
|
||||
parsedDiffs: ParsedFileDiff[];
|
||||
fileContent?: string;
|
||||
isLoading?: boolean;
|
||||
hasDiff: boolean;
|
||||
diffViewMode: DiffModeEnum;
|
||||
}
|
||||
|
||||
function DiffFragmentCard({
|
||||
file,
|
||||
startLine,
|
||||
endLine,
|
||||
message,
|
||||
parsedDiffs,
|
||||
fileContent,
|
||||
isLoading,
|
||||
hasDiff,
|
||||
diffViewMode,
|
||||
}: DiffFragmentCardProps) {
|
||||
const fileDiff = useMemo(
|
||||
() => getFileDiff(parsedDiffs, file),
|
||||
[parsedDiffs, file],
|
||||
);
|
||||
const lang = getHighlightLanguageFromPath(file);
|
||||
|
||||
const diffData = useMemo(() => {
|
||||
if (!fileDiff) return null;
|
||||
|
||||
if (!fileContent) return null;
|
||||
|
||||
const diffString = synthesizeFragmentDiff(
|
||||
fileDiff,
|
||||
fileContent,
|
||||
startLine,
|
||||
endLine,
|
||||
3,
|
||||
);
|
||||
|
||||
if (!diffString) return null;
|
||||
|
||||
return {
|
||||
hasChanges: diffHasChanges(diffString),
|
||||
hunks: [diffString],
|
||||
oldFile: { fileName: file, fileLang: lang },
|
||||
newFile: { fileName: file, fileLang: lang },
|
||||
};
|
||||
}, [fileDiff, file, lang, startLine, endLine, fileContent]);
|
||||
|
||||
if (!hasDiff || !fileDiff) {
|
||||
return (
|
||||
<div className="border rounded bg-muted/40 p-3">
|
||||
<div className="flex items-center gap-2 text-xs text-muted-foreground">
|
||||
<span className="font-mono truncate">{file}</span>
|
||||
<span className="shrink-0">
|
||||
Lines {startLine}
|
||||
{endLine !== startLine && `–${endLine}`}
|
||||
</span>
|
||||
</div>
|
||||
{message && (
|
||||
<div className="flex items-start gap-1.5 text-xs text-amber-600 dark:text-amber-400 mt-1.5 italic">
|
||||
<svg
|
||||
className="h-3.5 w-3.5 shrink-0 mt-0.5"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth={2}
|
||||
d="M7 8h10M7 12h4m1 8l-4-4H5a2 2 0 01-2-2V6a2 2 0 012-2h14a2 2 0 012 2v8a2 2 0 01-2 2h-3l-4 4z"
|
||||
/>
|
||||
</svg>
|
||||
<span>{message}</span>
|
||||
</div>
|
||||
)}
|
||||
{isLoading ? (
|
||||
<div className="mt-2 flex items-center gap-2">
|
||||
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-muted-foreground/60"></div>
|
||||
<span className="text-xs text-muted-foreground">Loading...</span>
|
||||
</div>
|
||||
) : (
|
||||
<div className="mt-2 text-xs text-muted-foreground">
|
||||
No diff available for this file.
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col space-y-4">
|
||||
<div className="flex">
|
||||
<div className="font-mono bg-muted py-1 px-2 rounded-sm border text-secondary-foreground break-words max-w-full">
|
||||
{file}
|
||||
</div>
|
||||
</div>
|
||||
{message && (
|
||||
<div>
|
||||
<span>
|
||||
<MarkdownRenderer content={message} />
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
<div className="border rounded bg-muted/40 overflow-hidden">
|
||||
{diffData ? (
|
||||
diffData.hasChanges ? (
|
||||
<div className="diff-view-container">
|
||||
<DiffView
|
||||
data={diffData}
|
||||
diffViewMode={diffViewMode}
|
||||
diffViewTheme="dark"
|
||||
diffViewHighlight
|
||||
diffViewFontSize={12}
|
||||
diffViewWrap={false}
|
||||
/>
|
||||
</div>
|
||||
) : fileContent ? (
|
||||
<CodeFragmentCard
|
||||
fragment={{
|
||||
file,
|
||||
start_line: startLine,
|
||||
end_line: endLine,
|
||||
message: "",
|
||||
}}
|
||||
fileContent={fileContent}
|
||||
isLoading={isLoading}
|
||||
hideHeader
|
||||
/>
|
||||
) : (
|
||||
<div className="px-3 py-4 text-xs text-muted-foreground">
|
||||
No changes in this fragment range.
|
||||
</div>
|
||||
)
|
||||
) : isLoading ? (
|
||||
<div className="px-3 py-4 flex items-center gap-2">
|
||||
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-muted-foreground/60"></div>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
Loading file content...
|
||||
</span>
|
||||
</div>
|
||||
) : (
|
||||
<div className="px-3 py-4 text-xs text-muted-foreground">
|
||||
No diff hunks match this fragment range.
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,55 +1,52 @@
|
||||
export function generateVerifier(): string {
|
||||
const array = new Uint8Array(32)
|
||||
crypto.getRandomValues(array)
|
||||
return base64UrlEncode(array)
|
||||
const array = new Uint8Array(32);
|
||||
crypto.getRandomValues(array);
|
||||
return base64UrlEncode(array);
|
||||
}
|
||||
|
||||
export async function generateChallenge(verifier: string): Promise<string> {
|
||||
const encoder = new TextEncoder()
|
||||
const data = encoder.encode(verifier)
|
||||
const hash = await crypto.subtle.digest('SHA-256', data)
|
||||
return bytesToHex(new Uint8Array(hash))
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(verifier);
|
||||
const hash = await crypto.subtle.digest("SHA-256", data);
|
||||
return bytesToHex(new Uint8Array(hash));
|
||||
}
|
||||
|
||||
function base64UrlEncode(array: Uint8Array): string {
|
||||
const base64 = btoa(String.fromCharCode(...array))
|
||||
return base64
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_')
|
||||
.replace(/=/g, '')
|
||||
const base64 = btoa(String.fromCharCode(...array));
|
||||
return base64.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
|
||||
}
|
||||
|
||||
function bytesToHex(bytes: Uint8Array): string {
|
||||
let out = ''
|
||||
let out = "";
|
||||
for (let i = 0; i < bytes.length; i++) {
|
||||
out += bytes[i].toString(16).padStart(2, '0')
|
||||
out += bytes[i].toString(16).padStart(2, "0");
|
||||
}
|
||||
return out
|
||||
return out;
|
||||
}
|
||||
|
||||
const VERIFIER_KEY = 'oauth_verifier'
|
||||
const TOKEN_KEY = 'invitation_token'
|
||||
const VERIFIER_KEY = "oauth_verifier";
|
||||
const TOKEN_KEY = "invitation_token";
|
||||
|
||||
export function storeVerifier(verifier: string): void {
|
||||
sessionStorage.setItem(VERIFIER_KEY, verifier)
|
||||
sessionStorage.setItem(VERIFIER_KEY, verifier);
|
||||
}
|
||||
|
||||
export function retrieveVerifier(): string | null {
|
||||
return sessionStorage.getItem(VERIFIER_KEY)
|
||||
return sessionStorage.getItem(VERIFIER_KEY);
|
||||
}
|
||||
|
||||
export function clearVerifier(): void {
|
||||
sessionStorage.removeItem(VERIFIER_KEY)
|
||||
sessionStorage.removeItem(VERIFIER_KEY);
|
||||
}
|
||||
|
||||
export function storeInvitationToken(token: string): void {
|
||||
sessionStorage.setItem(TOKEN_KEY, token)
|
||||
sessionStorage.setItem(TOKEN_KEY, token);
|
||||
}
|
||||
|
||||
export function retrieveInvitationToken(): string | null {
|
||||
return sessionStorage.getItem(TOKEN_KEY)
|
||||
return sessionStorage.getItem(TOKEN_KEY);
|
||||
}
|
||||
|
||||
export function clearInvitationToken(): void {
|
||||
sessionStorage.removeItem(TOKEN_KEY)
|
||||
sessionStorage.removeItem(TOKEN_KEY);
|
||||
}
|
||||
|
||||
170
remote-frontend/src/styles/diff-overrides.css
Normal file
170
remote-frontend/src/styles/diff-overrides.css
Normal file
@@ -0,0 +1,170 @@
|
||||
/* Diff view color overrides for dark theme - grey theme instead of GitHub blue */
|
||||
|
||||
/* Override the library's CSS variables with higher specificity */
|
||||
.diff-view-container
|
||||
.diff-tailwindcss-wrapper[data-theme="dark"]
|
||||
.diff-style-root,
|
||||
.diff-view-container .diff-style-root {
|
||||
--diff-add-content--: rgba(46, 160, 67, 0.15) !important;
|
||||
--diff-del-content--: rgba(248, 81, 73, 0.15) !important;
|
||||
--diff-add-content-highlight--: rgba(46, 160, 67, 0.4) !important;
|
||||
--diff-del-content-highlight--: rgba(248, 81, 73, 0.4) !important;
|
||||
--diff-add-lineNumber--: rgb(25, 61, 33) !important;
|
||||
--diff-del-lineNumber--: rgb(88, 42, 39) !important;
|
||||
--diff-plain-content--: #1e1e1e !important;
|
||||
--diff-plain-lineNumber--: #1e1e1e !important;
|
||||
--diff-plain-lineNumber-color--: #6e7681 !important;
|
||||
--diff-hunk-content--: #2a2a2a !important;
|
||||
--diff-hunk-lineNumber--: #2a2a2a !important;
|
||||
--diff-hunk-content-color--: #8b949e !important;
|
||||
--diff-empty-content--: #1e1e1e !important;
|
||||
--diff-expand-content--: #1e1e1e !important;
|
||||
--diff-expand-lineNumber--: #1e1e1e !important;
|
||||
--diff-expand-lineNumber-color--: #6e7681 !important;
|
||||
--diff-border--: #292929 !important;
|
||||
}
|
||||
|
||||
/* Override the library's dark theme background */
|
||||
.diff-view-container .diff-style-root,
|
||||
.diff-view-container [data-theme="dark"] .diff-style-root {
|
||||
background: #1e1e1e !important;
|
||||
}
|
||||
|
||||
/* Override syntax highlighting background */
|
||||
.diff-view-container .diff-line-syntax-raw .hljs,
|
||||
.diff-view-container [data-theme="dark"] .diff-line-syntax-raw .hljs {
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
/* Use JetBrains Mono for code */
|
||||
.diff-view-container .diff-line-content-raw,
|
||||
.diff-view-container .diff-line-syntax-raw,
|
||||
.diff-view-container .diff-line-num,
|
||||
.diff-view-container .diff-line-old-num,
|
||||
.diff-view-container .diff-line-new-num,
|
||||
.diff-view-container td {
|
||||
font-family: "JetBrains Mono", "Fira Code", "Consolas", monospace;
|
||||
font-size: 12px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
/* Line number styling */
|
||||
.diff-view-container .diff-line-num,
|
||||
.diff-view-container .diff-line-old-num,
|
||||
.diff-view-container .diff-line-new-num {
|
||||
color: #6e7681;
|
||||
padding: 0 8px;
|
||||
min-width: 40px;
|
||||
}
|
||||
|
||||
/* Hunk header styling */
|
||||
.diff-view-container .diff-line-hunk td,
|
||||
.diff-view-container .diff-line.diff-line-hunk td {
|
||||
color: var(--diff-hunk-content-color--);
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* Syntax highlighting - GitHub dark theme */
|
||||
.diff-view-container .hljs {
|
||||
color: #c9d1d9;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-doctag,
|
||||
.diff-view-container .hljs-keyword,
|
||||
.diff-view-container .hljs-meta .hljs-keyword,
|
||||
.diff-view-container .hljs-template-tag,
|
||||
.diff-view-container .hljs-template-variable,
|
||||
.diff-view-container .hljs-type,
|
||||
.diff-view-container .hljs-variable.language_ {
|
||||
color: #ff7b72;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-title,
|
||||
.diff-view-container .hljs-title.class_,
|
||||
.diff-view-container .hljs-title.class_.inherited__,
|
||||
.diff-view-container .hljs-title.function_ {
|
||||
color: #d2a8ff;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-attr,
|
||||
.diff-view-container .hljs-attribute,
|
||||
.diff-view-container .hljs-literal,
|
||||
.diff-view-container .hljs-meta,
|
||||
.diff-view-container .hljs-number,
|
||||
.diff-view-container .hljs-operator,
|
||||
.diff-view-container .hljs-variable,
|
||||
.diff-view-container .hljs-selector-attr,
|
||||
.diff-view-container .hljs-selector-class,
|
||||
.diff-view-container .hljs-selector-id {
|
||||
color: #79c0ff;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-regexp,
|
||||
.diff-view-container .hljs-string,
|
||||
.diff-view-container .hljs-meta .hljs-string {
|
||||
color: #a5d6ff;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-built_in,
|
||||
.diff-view-container .hljs-symbol {
|
||||
color: #ffa657;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-comment,
|
||||
.diff-view-container .hljs-code,
|
||||
.diff-view-container .hljs-formula {
|
||||
color: #8b949e;
|
||||
}
|
||||
|
||||
.diff-view-container .hljs-name,
|
||||
.diff-view-container .hljs-quote,
|
||||
.diff-view-container .hljs-selector-tag,
|
||||
.diff-view-container .hljs-selector-pseudo {
|
||||
color: #7ee787;
|
||||
}
|
||||
|
||||
/* Word-level diff highlights */
|
||||
.diff-view-container .diff-add-content-highlight {
|
||||
background-color: var(--diff-add-content-highlight--);
|
||||
}
|
||||
|
||||
.diff-view-container .diff-del-content-highlight {
|
||||
background-color: var(--diff-del-content-highlight--);
|
||||
}
|
||||
|
||||
/* Remove default table borders */
|
||||
.diff-view-container table {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.diff-view-container td {
|
||||
border: none;
|
||||
padding: 0 10px;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
/* Line content should use pre-wrap for proper whitespace */
|
||||
.diff-view-container .diff-line-content-raw,
|
||||
.diff-view-container .diff-line-syntax-raw {
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
/* +/- indicator styling */
|
||||
.diff-view-container .diff-line-add-sign,
|
||||
.diff-view-container .diff-line-del-sign {
|
||||
user-select: none;
|
||||
width: 1em;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
/* Ensure proper scrolling */
|
||||
.diff-view-container {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
/*
|
||||
.diff-view-container .diff-view-wrapper {
|
||||
min-width: max-content;
|
||||
} */
|
||||
17
remote-frontend/src/types/review.ts
Normal file
17
remote-frontend/src/types/review.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
export interface CodeFragment {
|
||||
file: string;
|
||||
start_line: number;
|
||||
end_line: number;
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface ReviewComment {
|
||||
comment: string;
|
||||
fragments: CodeFragment[];
|
||||
}
|
||||
|
||||
export interface ReviewResult {
|
||||
summary: string;
|
||||
comments: ReviewComment[];
|
||||
fileHashMap: Record<string, string>;
|
||||
}
|
||||
8
remote-frontend/src/vite-env.d.ts
vendored
8
remote-frontend/src/vite-env.d.ts
vendored
@@ -1,10 +1,12 @@
|
||||
/// <reference types="vite/client" />
|
||||
|
||||
interface ImportMetaEnv {
|
||||
readonly VITE_API_BASE_URL: string
|
||||
readonly VITE_APP_BASE_URL: string
|
||||
readonly VITE_API_BASE_URL: string;
|
||||
readonly VITE_APP_BASE_URL: string;
|
||||
readonly VITE_PUBLIC_POSTHOG_KEY: string;
|
||||
readonly VITE_PUBLIC_POSTHOG_HOST: string;
|
||||
}
|
||||
|
||||
interface ImportMeta {
|
||||
readonly env: ImportMetaEnv
|
||||
readonly env: ImportMetaEnv;
|
||||
}
|
||||
|
||||
@@ -5,7 +5,51 @@ export default {
|
||||
"./src/**/*.{js,ts,jsx,tsx}",
|
||||
],
|
||||
theme: {
|
||||
extend: {},
|
||||
extend: {
|
||||
fontSize: {
|
||||
xs: ['0.625rem', { lineHeight: '0.875rem' }],
|
||||
sm: ['0.75rem', { lineHeight: '1rem' }],
|
||||
base: ['0.875rem', { lineHeight: '1.25rem' }],
|
||||
lg: ['1rem', { lineHeight: '1.5rem' }],
|
||||
xl: ['1.125rem', { lineHeight: '1.75rem' }],
|
||||
},
|
||||
colors: {
|
||||
border: "hsl(var(--border))",
|
||||
input: "hsl(var(--input))",
|
||||
ring: "hsl(var(--ring))",
|
||||
background: "hsl(var(--background))",
|
||||
foreground: "hsl(var(--foreground))",
|
||||
primary: {
|
||||
DEFAULT: "hsl(var(--primary))",
|
||||
foreground: "hsl(var(--primary-foreground))",
|
||||
},
|
||||
secondary: {
|
||||
DEFAULT: "hsl(var(--secondary))",
|
||||
foreground: "hsl(var(--secondary-foreground))",
|
||||
},
|
||||
destructive: {
|
||||
DEFAULT: "hsl(var(--destructive))",
|
||||
foreground: "hsl(var(--destructive-foreground))",
|
||||
},
|
||||
muted: {
|
||||
DEFAULT: "hsl(var(--muted))",
|
||||
foreground: "hsl(var(--muted-foreground))",
|
||||
},
|
||||
accent: {
|
||||
DEFAULT: "hsl(var(--accent))",
|
||||
foreground: "hsl(var(--accent-foreground))",
|
||||
},
|
||||
},
|
||||
borderRadius: {
|
||||
lg: "var(--radius)",
|
||||
md: "calc(var(--radius) - 2px)",
|
||||
sm: "calc(var(--radius) - 4px)",
|
||||
},
|
||||
fontFamily: {
|
||||
'mono': ['JetBrains Mono', 'Noto Emoji', 'monospace'],
|
||||
'sans': ['Inter', 'system-ui', 'sans-serif'],
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [],
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user